From 80ebc8ae258d13ad2a05d5dfca0b986df1c18830 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 20:58:44 +0100 Subject: [PATCH 1/7] Upgrade CacheControl to 0.14.0 --- news/CacheControl.vendor.rst | 1 + pyproject.toml | 1 - src/pip/_vendor/cachecontrol/__init__.py | 2 +- src/pip/_vendor/cachecontrol/adapter.py | 10 +-- .../_vendor/cachecontrol/caches/file_cache.py | 7 +- src/pip/_vendor/cachecontrol/controller.py | 7 +- src/pip/_vendor/cachecontrol/heuristics.py | 2 +- src/pip/_vendor/cachecontrol/serialize.py | 76 ++----------------- src/pip/_vendor/vendor.txt | 2 +- 9 files changed, 27 insertions(+), 81 deletions(-) create mode 100644 news/CacheControl.vendor.rst diff --git a/news/CacheControl.vendor.rst b/news/CacheControl.vendor.rst new file mode 100644 index 00000000000..f6132b8332e --- /dev/null +++ b/news/CacheControl.vendor.rst @@ -0,0 +1 @@ +Upgrade CacheControl to 0.14.0 diff --git a/pyproject.toml b/pyproject.toml index 74a7f71ca59..6d1f48c11a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,7 +144,6 @@ distro = [] setuptools = "pkg_resources" [tool.vendoring.license.fallback-urls] -CacheControl = "https://raw.githubusercontent.com/ionrock/cachecontrol/v0.12.6/LICENSE.txt" distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" diff --git a/src/pip/_vendor/cachecontrol/__init__.py b/src/pip/_vendor/cachecontrol/__init__.py index 4d20bc9b12a..b34b0fcbd40 100644 --- a/src/pip/_vendor/cachecontrol/__init__.py +++ b/src/pip/_vendor/cachecontrol/__init__.py @@ -8,7 +8,7 @@ """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.13.1" +__version__ = "0.14.0" from pip._vendor.cachecontrol.adapter import CacheControlAdapter from pip._vendor.cachecontrol.controller import CacheController diff --git a/src/pip/_vendor/cachecontrol/adapter.py b/src/pip/_vendor/cachecontrol/adapter.py index 3e83e308dba..fbb4ecc8876 100644 --- a/src/pip/_vendor/cachecontrol/adapter.py +++ b/src/pip/_vendor/cachecontrol/adapter.py @@ -125,21 +125,21 @@ def build_response( else: # Wrap the response file with a wrapper that will cache the # response when the stream has been consumed. 
- response._fp = CallbackFileWrapper( # type: ignore[attr-defined] - response._fp, # type: ignore[attr-defined] + response._fp = CallbackFileWrapper( # type: ignore[assignment] + response._fp, # type: ignore[arg-type] functools.partial( self.controller.cache_response, request, response ), ) if response.chunked: - super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined] + super_update_chunk_length = response._update_chunk_length def _update_chunk_length(self: HTTPResponse) -> None: super_update_chunk_length() if self.chunk_left == 0: - self._fp._close() # type: ignore[attr-defined] + self._fp._close() # type: ignore[union-attr] - response._update_chunk_length = types.MethodType( # type: ignore[attr-defined] + response._update_chunk_length = types.MethodType( # type: ignore[method-assign] _update_chunk_length, response ) diff --git a/src/pip/_vendor/cachecontrol/caches/file_cache.py b/src/pip/_vendor/cachecontrol/caches/file_cache.py index 1fd28013084..e6e3a57947f 100644 --- a/src/pip/_vendor/cachecontrol/caches/file_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/file_cache.py @@ -6,7 +6,8 @@ import hashlib import os from textwrap import dedent -from typing import IO, TYPE_CHECKING +from typing import IO, TYPE_CHECKING, Union +from pathlib import Path from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache from pip._vendor.cachecontrol.controller import CacheController @@ -63,7 +64,7 @@ class _FileCacheMixin: def __init__( self, - directory: str, + directory: str | Path, forever: bool = False, filemode: int = 0o0600, dirmode: int = 0o0700, @@ -79,7 +80,7 @@ def __init__( """ NOTE: In order to use the FileCache you must have filelock installed. You can install it via pip: - pip install filelock + pip install cachecontrol[filecache] """ ) raise ImportError(notice) diff --git a/src/pip/_vendor/cachecontrol/controller.py b/src/pip/_vendor/cachecontrol/controller.py index 586b9f97b80..d7dd86e5f70 100644 --- a/src/pip/_vendor/cachecontrol/controller.py +++ b/src/pip/_vendor/cachecontrol/controller.py @@ -142,6 +142,11 @@ def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None: """ Load a cached response, or return None if it's not available. """ + # We do not support caching of partial content: so if the request contains a + # Range header then we don't want to load anything from the cache. 
+ if "Range" in request.headers: + return None + cache_url = request.url assert cache_url is not None cache_data = self.cache.get(cache_url) @@ -480,7 +485,7 @@ def update_cached_response( cached_response.headers.update( { k: v - for k, v in response.headers.items() # type: ignore[no-untyped-call] + for k, v in response.headers.items() if k.lower() not in excluded_headers } ) diff --git a/src/pip/_vendor/cachecontrol/heuristics.py b/src/pip/_vendor/cachecontrol/heuristics.py index b9d72ca4ac5..f6e5634e385 100644 --- a/src/pip/_vendor/cachecontrol/heuristics.py +++ b/src/pip/_vendor/cachecontrol/heuristics.py @@ -68,7 +68,7 @@ def update_headers(self, response: HTTPResponse) -> dict[str, str]: if "expires" not in response.headers: date = parsedate(response.headers["date"]) - expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc] + expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[index,misc] headers["expires"] = datetime_to_header(expires) headers["cache-control"] = "public" return headers diff --git a/src/pip/_vendor/cachecontrol/serialize.py b/src/pip/_vendor/cachecontrol/serialize.py index f9e967c3c34..a49487a1493 100644 --- a/src/pip/_vendor/cachecontrol/serialize.py +++ b/src/pip/_vendor/cachecontrol/serialize.py @@ -32,13 +32,13 @@ def dumps( # also update the response with a new file handler to be # sure it acts as though it was never read. body = response.read(decode_content=False) - response._fp = io.BytesIO(body) # type: ignore[attr-defined] + response._fp = io.BytesIO(body) # type: ignore[assignment] response.length_remaining = len(body) data = { "response": { "body": body, # Empty bytestring if body is stored separately - "headers": {str(k): str(v) for k, v in response.headers.items()}, # type: ignore[no-untyped-call] + "headers": {str(k): str(v) for k, v in response.headers.items()}, "status": response.status, "version": response.version, "reason": str(response.reason), @@ -72,31 +72,14 @@ def loads( if not data: return None - # Determine what version of the serializer the data was serialized - # with - try: - ver, data = data.split(b",", 1) - except ValueError: - ver = b"cc=0" - - # Make sure that our "ver" is actually a version and isn't a false - # positive from a , being in the data stream. - if ver[:3] != b"cc=": - data = ver + data - ver = b"cc=0" - - # Get the version number out of the cc=N - verstr = ver.split(b"=", 1)[-1].decode("ascii") - - # Dispatch to the actual load method for the given version - try: - return getattr(self, f"_loads_v{verstr}")(request, data, body_file) # type: ignore[no-any-return] - - except AttributeError: - # This is a version we don't have a loads function for, so we'll - # just treat it as a miss and return None + # Previous versions of this library supported other serialization + # formats, but these have all been removed. + if not data.startswith(f"cc={self.serde_version},".encode()): return None + data = data[5:] + return self._loads_v4(request, data, body_file) + def prepare_response( self, request: PreparedRequest, @@ -149,49 +132,6 @@ def prepare_response( return HTTPResponse(body=body, preload_content=False, **cached["response"]) - def _loads_v0( - self, - request: PreparedRequest, - data: bytes, - body_file: IO[bytes] | None = None, - ) -> None: - # The original legacy cache data. This doesn't contain enough - # information to construct everything we need, so we'll treat this as - # a miss. 
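The simplified Serializer.loads() shown above keeps only the current serialization format: a cached blob must begin with the literal prefix cc=4, (that is, "cc=" plus Serializer.serde_version), otherwise it is treated as a cache miss, which is how entries written by older CacheControl releases get invalidated. A minimal standalone sketch of that gate, assuming only the prefix layout visible in the hunk; the helper name is illustrative, not part of CacheControl's API::

    SERDE_VERSION = "4"  # mirrors Serializer.serde_version in CacheControl 0.14.0

    def split_versioned_blob(data: bytes):
        # Return the payload for a current-format entry, or None for a miss.
        prefix = f"cc={SERDE_VERSION},".encode()
        if not data or not data.startswith(prefix):
            return None                 # legacy or unknown format -> cache miss
        return data[len(prefix):]       # payload handed on to _loads_v4

    assert split_versioned_blob(b"cc=4,payload") == b"payload"
    assert split_versioned_blob(b"cc=0,legacy-entry") is None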
- return None - - def _loads_v1( - self, - request: PreparedRequest, - data: bytes, - body_file: IO[bytes] | None = None, - ) -> HTTPResponse | None: - # The "v1" pickled cache format. This is no longer supported - # for security reasons, so we treat it as a miss. - return None - - def _loads_v2( - self, - request: PreparedRequest, - data: bytes, - body_file: IO[bytes] | None = None, - ) -> HTTPResponse | None: - # The "v2" compressed base64 cache format. - # This has been removed due to age and poor size/performance - # characteristics, so we treat it as a miss. - return None - - def _loads_v3( - self, - request: PreparedRequest, - data: bytes, - body_file: IO[bytes] | None = None, - ) -> None: - # Due to Python 2 encoding issues, it's impossible to know for sure - # exactly how to load v3 entries, thus we'll treat these as a miss so - # that they get rewritten out as v4 entries. - return None - def _loads_v4( self, request: PreparedRequest, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f00a8c02db4..bb962664d08 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,4 +1,4 @@ -CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this. +CacheControl==0.14.0 colorama==0.4.6 distlib==0.3.8 distro==1.9.0 From 531bf756080b5b97d8c4601c683b3558dd2414ef Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 20:59:26 +0100 Subject: [PATCH 2/7] Upgrade platformdirs to 4.2.1 --- news/platformdirs.vendor.rst | 2 +- src/pip/_vendor/platformdirs/__init__.py | 76 ++++++++++++------------ src/pip/_vendor/platformdirs/__main__.py | 2 +- src/pip/_vendor/platformdirs/android.py | 27 +++++---- src/pip/_vendor/platformdirs/api.py | 33 ++++++---- src/pip/_vendor/platformdirs/macos.py | 11 ++-- src/pip/_vendor/platformdirs/unix.py | 37 +++++++----- src/pip/_vendor/platformdirs/version.py | 4 +- src/pip/_vendor/platformdirs/windows.py | 35 ++++++----- src/pip/_vendor/vendor.txt | 2 +- 10 files changed, 129 insertions(+), 100 deletions(-) diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst index fb749d1ab8d..4c1af68710e 100644 --- a/news/platformdirs.vendor.rst +++ b/news/platformdirs.vendor.rst @@ -1 +1 @@ -Upgrade platformdirs to 4.2.0 +Upgrade platformdirs to 4.2.1 diff --git a/src/pip/_vendor/platformdirs/__init__.py b/src/pip/_vendor/platformdirs/__init__.py index 3da5ac1474f..d58dd2b7dde 100644 --- a/src/pip/_vendor/platformdirs/__init__.py +++ b/src/pip/_vendor/platformdirs/__init__.py @@ -1,6 +1,8 @@ """ -Utilities for determining application-specific dirs. See for details and -usage. +Utilities for determining application-specific dirs. + +See for details and usage. 
+ """ from __future__ import annotations @@ -20,22 +22,22 @@ def _set_platform_dir_class() -> type[PlatformDirsABC]: if sys.platform == "win32": - from pip._vendor.platformdirs.windows import Windows as Result + from pip._vendor.platformdirs.windows import Windows as Result # noqa: PLC0415 elif sys.platform == "darwin": - from pip._vendor.platformdirs.macos import MacOS as Result + from pip._vendor.platformdirs.macos import MacOS as Result # noqa: PLC0415 else: - from pip._vendor.platformdirs.unix import Unix as Result + from pip._vendor.platformdirs.unix import Unix as Result # noqa: PLC0415 if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": if os.getenv("SHELL") or os.getenv("PREFIX"): return Result - from pip._vendor.platformdirs.android import _android_folder + from pip._vendor.platformdirs.android import _android_folder # noqa: PLC0415 if _android_folder() is not None: - from pip._vendor.platformdirs.android import Android + from pip._vendor.platformdirs.android import Android # noqa: PLC0415 - return Android # return to avoid redefinition of result + return Android # return to avoid redefinition of a result return Result @@ -507,7 +509,7 @@ def user_log_path( def user_documents_path() -> Path: - """:returns: documents path tied to the user""" + """:returns: documents a path tied to the user""" return PlatformDirs().user_documents_path @@ -585,41 +587,41 @@ def site_runtime_path( __all__ = [ - "__version__", - "__version_info__", - "PlatformDirs", "AppDirs", + "PlatformDirs", "PlatformDirsABC", - "user_data_dir", - "user_config_dir", - "user_cache_dir", - "user_state_dir", - "user_log_dir", - "user_documents_dir", - "user_downloads_dir", - "user_pictures_dir", - "user_videos_dir", - "user_music_dir", - "user_desktop_dir", - "user_runtime_dir", - "site_data_dir", - "site_config_dir", + "__version__", + "__version_info__", "site_cache_dir", + "site_cache_path", + "site_config_dir", + "site_config_path", + "site_data_dir", + "site_data_path", "site_runtime_dir", - "user_data_path", - "user_config_path", + "site_runtime_path", + "user_cache_dir", "user_cache_path", - "user_state_path", - "user_log_path", + "user_config_dir", + "user_config_path", + "user_data_dir", + "user_data_path", + "user_desktop_dir", + "user_desktop_path", + "user_documents_dir", "user_documents_path", + "user_downloads_dir", "user_downloads_path", - "user_pictures_path", - "user_videos_path", + "user_log_dir", + "user_log_path", + "user_music_dir", "user_music_path", - "user_desktop_path", + "user_pictures_dir", + "user_pictures_path", + "user_runtime_dir", "user_runtime_path", - "site_data_path", - "site_config_path", - "site_cache_path", - "site_runtime_path", + "user_state_dir", + "user_state_path", + "user_videos_dir", + "user_videos_path", ] diff --git a/src/pip/_vendor/platformdirs/__main__.py b/src/pip/_vendor/platformdirs/__main__.py index 61342265b60..fa8a677a336 100644 --- a/src/pip/_vendor/platformdirs/__main__.py +++ b/src/pip/_vendor/platformdirs/__main__.py @@ -24,7 +24,7 @@ def main() -> None: - """Run main entry point.""" + """Run the main entry point.""" app_name = "MyApp" app_author = "MyCompany" diff --git a/src/pip/_vendor/platformdirs/android.py b/src/pip/_vendor/platformdirs/android.py index 4acdb63833f..fefafd32977 100644 --- a/src/pip/_vendor/platformdirs/android.py +++ b/src/pip/_vendor/platformdirs/android.py @@ -13,10 +13,11 @@ class Android(PlatformDirsABC): """ - Follows the guidance `from here `_. 
Makes use of the - `appname `, - `version `, - `ensure_exists `. + Follows the guidance `from here `_. + + Makes use of the `appname `, `version + `, `ensure_exists `. + """ @property @@ -44,7 +45,7 @@ def site_config_dir(self) -> str: @property def user_cache_dir(self) -> str: - """:return: cache directory tied to the user, e.g. e.g. ``/data/user///cache/``""" + """:return: cache directory tied to the user, e.g.,``/data/user///cache/``""" return self._append_app_name_and_version(cast(str, _android_folder()), "cache") @property @@ -119,13 +120,13 @@ def site_runtime_dir(self) -> str: def _android_folder() -> str | None: """:return: base folder for the Android OS or None if it cannot be found""" try: - # First try to get path to android app via pyjnius - from jnius import autoclass + # First try to get a path to android app via pyjnius + from jnius import autoclass # noqa: PLC0415 context = autoclass("android.content.Context") result: str | None = context.getFilesDir().getParentFile().getAbsolutePath() except Exception: # noqa: BLE001 - # if fails find an android folder looking path on the sys.path + # if fails find an android folder looking a path on the sys.path pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files") for path in sys.path: if pattern.match(path): @@ -141,7 +142,7 @@ def _android_documents_folder() -> str: """:return: documents folder for the Android OS""" # Get directories with pyjnius try: - from jnius import autoclass + from jnius import autoclass # noqa: PLC0415 context = autoclass("android.content.Context") environment = autoclass("android.os.Environment") @@ -157,7 +158,7 @@ def _android_downloads_folder() -> str: """:return: downloads folder for the Android OS""" # Get directories with pyjnius try: - from jnius import autoclass + from jnius import autoclass # noqa: PLC0415 context = autoclass("android.content.Context") environment = autoclass("android.os.Environment") @@ -173,7 +174,7 @@ def _android_pictures_folder() -> str: """:return: pictures folder for the Android OS""" # Get directories with pyjnius try: - from jnius import autoclass + from jnius import autoclass # noqa: PLC0415 context = autoclass("android.content.Context") environment = autoclass("android.os.Environment") @@ -189,7 +190,7 @@ def _android_videos_folder() -> str: """:return: videos folder for the Android OS""" # Get directories with pyjnius try: - from jnius import autoclass + from jnius import autoclass # noqa: PLC0415 context = autoclass("android.content.Context") environment = autoclass("android.os.Environment") @@ -205,7 +206,7 @@ def _android_music_folder() -> str: """:return: music folder for the Android OS""" # Get directories with pyjnius try: - from jnius import autoclass + from jnius import autoclass # noqa: PLC0415 context = autoclass("android.content.Context") environment = autoclass("android.os.Environment") diff --git a/src/pip/_vendor/platformdirs/api.py b/src/pip/_vendor/platformdirs/api.py index 031a38a3d36..c50caa648a6 100644 --- a/src/pip/_vendor/platformdirs/api.py +++ b/src/pip/_vendor/platformdirs/api.py @@ -11,10 +11,10 @@ from typing import Iterator, Literal -class PlatformDirsABC(ABC): +class PlatformDirsABC(ABC): # noqa: PLR0904 """Abstract base class for platform directories.""" - def __init__( # noqa: PLR0913 + def __init__( # noqa: PLR0913, PLR0917 self, appname: str | None = None, appauthor: str | None | Literal[False] = None, @@ -34,34 +34,47 @@ def __init__( # noqa: PLR0913 :param multipath: See `multipath`. :param opinion: See `opinion`. 
:param ensure_exists: See `ensure_exists`. + """ self.appname = appname #: The name of application. self.appauthor = appauthor """ - The name of the app author or distributing body for this application. Typically, it is the owning company name. - Defaults to `appname`. You may pass ``False`` to disable it. + The name of the app author or distributing body for this application. + + Typically, it is the owning company name. Defaults to `appname`. You may pass ``False`` to disable it. + """ self.version = version """ - An optional version path element to append to the path. You might want to use this if you want multiple versions - of your app to be able to run independently. If used, this would typically be ``.``. + An optional version path element to append to the path. + + You might want to use this if you want multiple versions of your app to be able to run independently. If used, + this would typically be ``.``. + """ self.roaming = roaming """ - Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup - for roaming profiles, this user data will be synced on login (see - `here `_). + Whether to use the roaming appdata directory on Windows. + + That means that for users on a Windows network setup for roaming profiles, this user data will be synced on + login (see + `here `_). + """ self.multipath = multipath """ An optional parameter which indicates that the entire list of data dirs should be returned. + By default, the first item would only be returned. + """ self.opinion = opinion #: A flag to indicating to use opinionated values. self.ensure_exists = ensure_exists """ Optionally create the directory (and any missing parents) upon access if it does not exist. + By default, no directories are created. + """ def _append_app_name_and_version(self, *base: str) -> str: @@ -200,7 +213,7 @@ def user_log_path(self) -> Path: @property def user_documents_path(self) -> Path: - """:return: documents path tied to the user""" + """:return: documents a path tied to the user""" return Path(self.user_documents_dir) @property diff --git a/src/pip/_vendor/platformdirs/macos.py b/src/pip/_vendor/platformdirs/macos.py index b7b48808ca7..eb1ba5df1da 100644 --- a/src/pip/_vendor/platformdirs/macos.py +++ b/src/pip/_vendor/platformdirs/macos.py @@ -10,11 +10,14 @@ class MacOS(PlatformDirsABC): """ - Platform directories for the macOS operating system. Follows the guidance from `Apple documentation - `_. + Platform directories for the macOS operating system. + + Follows the guidance from + `Apple documentation `_. Makes use of the `appname `, `version `, `ensure_exists `. + """ @property @@ -28,7 +31,7 @@ def site_data_dir(self) -> str: :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``. If we're using a Python binary managed by `Homebrew `_, the directory will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``. - If `multipath ` is enabled and we're in Homebrew, + If `multipath ` is enabled, and we're in Homebrew, the response is a multi-path string separated by ":", e.g. ``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version`` """ @@ -60,7 +63,7 @@ def site_cache_dir(self) -> str: :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``. If we're using a Python binary managed by `Homebrew `_, the directory will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``. 
- If `multipath ` is enabled and we're in Homebrew, + If `multipath ` is enabled, and we're in Homebrew, the response is a multi-path string separated by ":", e.g. ``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version`` """ diff --git a/src/pip/_vendor/platformdirs/unix.py b/src/pip/_vendor/platformdirs/unix.py index ca4728e6079..9500ade614c 100644 --- a/src/pip/_vendor/platformdirs/unix.py +++ b/src/pip/_vendor/platformdirs/unix.py @@ -6,13 +6,13 @@ import sys from configparser import ConfigParser from pathlib import Path -from typing import Iterator +from typing import Iterator, NoReturn from .api import PlatformDirsABC if sys.platform == "win32": - def getuid() -> int: + def getuid() -> NoReturn: msg = "should only be used on Unix" raise RuntimeError(msg) @@ -20,17 +20,17 @@ def getuid() -> int: from os import getuid -class Unix(PlatformDirsABC): +class Unix(PlatformDirsABC): # noqa: PLR0904 """ - On Unix/Linux, we follow the - `XDG Basedir Spec `_. The spec allows - overriding directories with environment variables. The examples show are the default values, alongside the name of - the environment variable that overrides them. Makes use of the - `appname `, - `version `, - `multipath `, - `opinion `, - `ensure_exists `. + On Unix/Linux, we follow the `XDG Basedir Spec `_. + + The spec allows overriding directories with environment variables. The examples shown are the default values, + alongside the name of the environment variable that overrides them. Makes use of the `appname + `, `version `, `multipath + `, `opinion `, `ensure_exists + `. + """ @property @@ -205,17 +205,17 @@ def site_runtime_dir(self) -> str: @property def site_data_path(self) -> Path: - """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``""" + """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``""" return self._first_item_as_path_if_multipath(self.site_data_dir) @property def site_config_path(self) -> Path: - """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``""" + """:return: config path shared by the users, returns the first item, even if ``multipath`` is set to ``True``""" return self._first_item_as_path_if_multipath(self.site_config_dir) @property def site_cache_path(self) -> Path: - """:return: cache path shared by users. Only return first item, even if ``multipath`` is set to ``True``""" + """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``""" return self._first_item_as_path_if_multipath(self.site_cache_dir) def _first_item_as_path_if_multipath(self, directory: str) -> Path: @@ -246,7 +246,12 @@ def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str: def _get_user_dirs_folder(key: str) -> str | None: - """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/.""" + """ + Return directory from user-dirs.dirs config file. + + See https://freedesktop.org/wiki/Software/xdg-user-dirs/. 
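As the reflowed Unix docstring above says, the XDG environment variables take precedence over the built-in defaults. A small usage sketch on Linux against the public platformdirs package (pip itself only imports the vendored copy under pip._vendor.platformdirs); the application name is made up::

    import os

    import platformdirs

    os.environ["XDG_CONFIG_HOME"] = "/tmp/example-xdg"
    # On Linux this resolves under the override instead of ~/.config
    print(platformdirs.user_config_dir("MyApp"))  # e.g. /tmp/example-xdg/MyApp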
+ + """ user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs" if user_dirs_config_path.exists(): parser = ConfigParser() diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py index cc1e34568ad..c418cd0c9af 100644 --- a/src/pip/_vendor/platformdirs/version.py +++ b/src/pip/_vendor/platformdirs/version.py @@ -12,5 +12,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = '4.2.0' -__version_tuple__ = version_tuple = (4, 2, 0) +__version__ = version = '4.2.1' +__version_tuple__ = version_tuple = (4, 2, 1) diff --git a/src/pip/_vendor/platformdirs/windows.py b/src/pip/_vendor/platformdirs/windows.py index c62d0c8d2ba..d7bc96091a2 100644 --- a/src/pip/_vendor/platformdirs/windows.py +++ b/src/pip/_vendor/platformdirs/windows.py @@ -2,7 +2,6 @@ from __future__ import annotations -import ctypes import os import sys from functools import lru_cache @@ -16,15 +15,13 @@ class Windows(PlatformDirsABC): """ - `MSDN on where to store app data files - `_. - Makes use of the - `appname `, - `appauthor `, - `version `, - `roaming `, - `opinion `, - `ensure_exists `. + `MSDN on where to store app data files `_. + + Makes use of the `appname `, `appauthor + `, `version `, `roaming + `, `opinion `, `ensure_exists + `. + """ @property @@ -165,7 +162,7 @@ def get_win_folder_from_env_vars(csidl_name: str) -> str: def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None: - """Get folder for a CSIDL name that does not exist as an environment variable.""" + """Get a folder for a CSIDL name that does not exist as an environment variable.""" if csidl_name == "CSIDL_PERSONAL": return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") # noqa: PTH118 @@ -189,6 +186,7 @@ def get_win_folder_from_registry(csidl_name: str) -> str: This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer for all CSIDL_* names. + """ shell_folder_name = { "CSIDL_APPDATA": "AppData", @@ -205,7 +203,7 @@ def get_win_folder_from_registry(csidl_name: str) -> str: raise ValueError(msg) if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows raise NotImplementedError - import winreg + import winreg # noqa: PLC0415 key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders") directory, _ = winreg.QueryValueEx(key, shell_folder_name) @@ -218,6 +216,8 @@ def get_win_folder_via_ctypes(csidl_name: str) -> str: # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead. 
# https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid + import ctypes # noqa: PLC0415 + csidl_const = { "CSIDL_APPDATA": 26, "CSIDL_COMMON_APPDATA": 35, @@ -250,10 +250,15 @@ def get_win_folder_via_ctypes(csidl_name: str) -> str: def _pick_get_win_folder() -> Callable[[str], str]: - if hasattr(ctypes, "windll"): - return get_win_folder_via_ctypes try: - import winreg # noqa: F401 + import ctypes # noqa: PLC0415 + except ImportError: + pass + else: + if hasattr(ctypes, "windll"): + return get_win_folder_via_ctypes + try: + import winreg # noqa: PLC0415, F401 except ImportError: return get_win_folder_from_env_vars else: diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index bb962664d08..fc60f5cae04 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -4,7 +4,7 @@ distlib==0.3.8 distro==1.9.0 msgpack==1.0.8 packaging==21.3 -platformdirs==4.2.0 +platformdirs==4.2.1 pyparsing==3.1.0 pyproject-hooks==1.0.0 requests==2.31.0 From f404164d7918b6939e5dceff1b4c41c3e8e1591b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 20:59:46 +0100 Subject: [PATCH 3/7] Upgrade pyparsing to 3.1.2 --- news/pyparsing.vendor.rst | 1 + src/pip/_vendor/pyparsing/__init__.py | 7 +- src/pip/_vendor/pyparsing/actions.py | 21 +- src/pip/_vendor/pyparsing/common.py | 9 +- src/pip/_vendor/pyparsing/core.py | 964 +++++++++--------- src/pip/_vendor/pyparsing/diagram/__init__.py | 2 +- src/pip/_vendor/pyparsing/exceptions.py | 81 +- src/pip/_vendor/pyparsing/helpers.py | 86 +- src/pip/_vendor/pyparsing/results.py | 211 ++-- src/pip/_vendor/pyparsing/testing.py | 118 ++- src/pip/_vendor/pyparsing/unicode.py | 13 +- src/pip/_vendor/pyparsing/util.py | 13 +- src/pip/_vendor/vendor.txt | 2 +- 13 files changed, 740 insertions(+), 788 deletions(-) create mode 100644 news/pyparsing.vendor.rst diff --git a/news/pyparsing.vendor.rst b/news/pyparsing.vendor.rst new file mode 100644 index 00000000000..d6aab47079f --- /dev/null +++ b/news/pyparsing.vendor.rst @@ -0,0 +1 @@ +Upgrade pyparsing to 3.1.2 diff --git a/src/pip/_vendor/pyparsing/__init__.py b/src/pip/_vendor/pyparsing/__init__.py index 88bc10ac18a..83937ebf1f1 100644 --- a/src/pip/_vendor/pyparsing/__init__.py +++ b/src/pip/_vendor/pyparsing/__init__.py @@ -120,8 +120,8 @@ def __repr__(self): return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})" -__version_info__ = version_info(3, 1, 0, "final", 1) -__version_time__ = "18 Jun 2023 14:05 UTC" +__version_info__ = version_info(3, 1, 2, "final", 1) +__version_time__ = "06 Mar 2024 07:08 UTC" __version__ = __version_info__.__version__ __versionTime__ = __version_time__ __author__ = "Paul McGuire " @@ -319,4 +319,7 @@ def __repr__(self): "unicodeString", "withAttribute", "withClass", + "common", + "unicode", + "testing", ] diff --git a/src/pip/_vendor/pyparsing/actions.py b/src/pip/_vendor/pyparsing/actions.py index ca6e4c6afb4..ce51b3957ca 100644 --- a/src/pip/_vendor/pyparsing/actions.py +++ b/src/pip/_vendor/pyparsing/actions.py @@ -111,7 +111,6 @@ def with_attribute(*args, **attr_dict):
<div type="graph">1,3 2,3 1,1</div>

<div>this has no type</div>
- ''' div,div_end = make_html_tags("div") @@ -199,19 +198,9 @@ def with_class(classname, namespace=""): # pre-PEP8 compatibility symbols # fmt: off -@replaced_by_pep8(replace_with) -def replaceWith(): ... - -@replaced_by_pep8(remove_quotes) -def removeQuotes(): ... - -@replaced_by_pep8(with_attribute) -def withAttribute(): ... - -@replaced_by_pep8(with_class) -def withClass(): ... - -@replaced_by_pep8(match_only_at_col) -def matchOnlyAtCol(): ... - +replaceWith = replaced_by_pep8("replaceWith", replace_with) +removeQuotes = replaced_by_pep8("removeQuotes", remove_quotes) +withAttribute = replaced_by_pep8("withAttribute", with_attribute) +withClass = replaced_by_pep8("withClass", with_class) +matchOnlyAtCol = replaced_by_pep8("matchOnlyAtCol", match_only_at_col) # fmt: on diff --git a/src/pip/_vendor/pyparsing/common.py b/src/pip/_vendor/pyparsing/common.py index 7a666b276df..74faa46085a 100644 --- a/src/pip/_vendor/pyparsing/common.py +++ b/src/pip/_vendor/pyparsing/common.py @@ -206,7 +206,7 @@ class pyparsing_common: scientific notation and returns a float""" # streamlining this expression makes the docs nicer-looking - number = (sci_real | real | signed_integer).setName("number").streamline() + number = (sci_real | real | signed_integer).set_name("number").streamline() """any numeric expression, returns the corresponding Python type""" fnumber = ( @@ -216,6 +216,13 @@ class pyparsing_common: ) """any int or real number, returned as float""" + ieee_float = ( + Regex(r"(?i)[+-]?((\d+\.?\d*(e[+-]?\d+)?)|nan|inf(inity)?)") + .set_name("ieee_float") + .set_parse_action(convert_to_float) + ) + """any floating-point literal (int, real number, infinity, or NaN), returned as float""" + identifier = Word(identchars, identbodychars).set_name("identifier") """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" diff --git a/src/pip/_vendor/pyparsing/core.py b/src/pip/_vendor/pyparsing/core.py index 8d5a856ecd6..fc403862354 100644 --- a/src/pip/_vendor/pyparsing/core.py +++ b/src/pip/_vendor/pyparsing/core.py @@ -571,6 +571,7 @@ def set_results_name( Example:: + integer = Word(nums) date_str = (integer.set_results_name("year") + '/' + integer.set_results_name("month") + '/' + integer.set_results_name("day")) @@ -610,9 +611,8 @@ def breaker(instring, loc, doActions=True, callPreParse=True): breaker._originalParseMethod = _parseMethod # type: ignore [attr-defined] self._parse = breaker # type: ignore [assignment] - else: - if hasattr(self._parse, "_originalParseMethod"): - self._parse = self._parse._originalParseMethod # type: ignore [attr-defined, assignment] + elif hasattr(self._parse, "_originalParseMethod"): + self._parse = self._parse._originalParseMethod # type: ignore [attr-defined, assignment] return self def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": @@ -692,13 +692,15 @@ def is_valid_date(instring, loc, toks): """ if list(fns) == [None]: self.parseAction = [] - else: - if not all(callable(fn) for fn in fns): - raise TypeError("parse actions must be callable") - self.parseAction = [_trim_arity(fn) for fn in fns] - self.callDuringTry = kwargs.get( - "call_during_try", kwargs.get("callDuringTry", False) - ) + return self + + if not all(callable(fn) for fn in fns): + raise TypeError("parse actions must be callable") + self.parseAction = [_trim_arity(fn) for fn in fns] + self.callDuringTry = kwargs.get( + "call_during_try", kwargs.get("callDuringTry", False) + ) + return self def add_parse_action(self, *fns: ParseAction, 
**kwargs) -> "ParserElement": @@ -771,6 +773,7 @@ def _skipIgnorables(self, instring: str, loc: int) -> int: return loc exprsFound = True ignore_expr_fns = [e._parse for e in self.ignoreExprs] + last_loc = loc while exprsFound: exprsFound = False for ignore_fn in ignore_expr_fns: @@ -780,6 +783,10 @@ def _skipIgnorables(self, instring: str, loc: int) -> int: exprsFound = True except ParseException: pass + # check if all ignore exprs matched but didn't actually advance the parse location + if loc == last_loc: + break + last_loc = loc return loc def preParse(self, instring: str, loc: int) -> int: @@ -939,11 +946,9 @@ class to help type checking not_in_cache: bool - def get(self, *args): - ... + def get(self, *args): ... - def set(self, *args): - ... + def set(self, *args): ... # argument cache for optimizing repeated calls when backtracking through recursive expressions packrat_cache = ( @@ -1075,11 +1080,13 @@ def enable_left_recursion( elif cache_size_limit > 0: ParserElement.recursion_memos = _LRUMemo(capacity=cache_size_limit) # type: ignore[assignment] else: - raise NotImplementedError("Memo size of %s" % cache_size_limit) + raise NotImplementedError(f"Memo size of {cache_size_limit}") ParserElement._left_recursion_enabled = True @staticmethod - def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: + def enable_packrat( + cache_size_limit: Union[int, None] = 128, *, force: bool = False + ) -> None: """ Enables "packrat" parsing, which adds memoizing to the parsing logic. Repeated parse attempts at the same string location (which happens @@ -1114,13 +1121,16 @@ def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: ParserElement.disable_memoization() elif ParserElement._left_recursion_enabled: raise RuntimeError("Packrat and Bounded Recursion are not compatible") - if not ParserElement._packratEnabled: - ParserElement._packratEnabled = True - if cache_size_limit is None: - ParserElement.packrat_cache = _UnboundedCache() - else: - ParserElement.packrat_cache = _FifoCache(cache_size_limit) # type: ignore[assignment] - ParserElement._parse = ParserElement._parseCache + + if ParserElement._packratEnabled: + return + + ParserElement._packratEnabled = True + if cache_size_limit is None: + ParserElement.packrat_cache = _UnboundedCache() + else: + ParserElement.packrat_cache = _FifoCache(cache_size_limit) # type: ignore[assignment] + ParserElement._parse = ParserElement._parseCache def parse_string( self, instring: str, parse_all: bool = False, *, parseAll: bool = False @@ -1278,9 +1288,9 @@ def scan_string( except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc.with_traceback(None) + + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc.with_traceback(None) def transform_string(self, instring: str, *, debug: bool = False) -> str: """ @@ -1310,23 +1320,27 @@ def transform_string(self, instring: str, *, debug: bool = False) -> str: try: for t, s, e in self.scan_string(instring, debug=debug): out.append(instring[lastE:s]) - if t: - if isinstance(t, ParseResults): - out += t.as_list() - elif isinstance(t, Iterable) and not isinstance(t, str_type): - out.extend(t) - else: - out.append(t) lastE = e + + if not t: + continue + + if isinstance(t, ParseResults): + out += t.as_list() + elif isinstance(t, Iterable) and not isinstance(t, str_type): + out.extend(t) + else: + 
out.append(t) + out.append(instring[lastE:]) out = [o for o in out if o] return "".join([str(s) for s in _flatten(out)]) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc.with_traceback(None) + + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc.with_traceback(None) def search_string( self, @@ -1364,9 +1378,9 @@ def search_string( except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc.with_traceback(None) + + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc.with_traceback(None) def split( self, @@ -1495,9 +1509,12 @@ def __mul__(self, other) -> "ParserElement": elif isinstance(other, tuple) and other[:1] == (Ellipsis,): other = ((0,) + other[1:] + (None,))[:2] + if not isinstance(other, (int, tuple)): + return NotImplemented + if isinstance(other, int): minElements, optElements = other, 0 - elif isinstance(other, tuple): + else: other = tuple(o if o is not Ellipsis else None for o in other) other = (other + (None, None))[:2] if other[0] is None: @@ -1514,8 +1531,6 @@ def __mul__(self, other) -> "ParserElement": optElements -= minElements else: return NotImplemented - else: - return NotImplemented if minElements < 0: raise ValueError("cannot multiply ParserElement by negative value") @@ -1704,8 +1719,8 @@ def __call__(self, name: typing.Optional[str] = None) -> "ParserElement": """ if name is not None: return self._setResultsName(name) - else: - return self.copy() + + return self.copy() def suppress(self) -> "ParserElement": """ @@ -1763,7 +1778,7 @@ def ignore(self, other: "ParserElement") -> "ParserElement": Example:: - patt = Word(alphas)[1, ...] + patt = Word(alphas)[...] 
patt.parse_string('ablaj /* comment */ lskjd') # -> ['ablaj'] @@ -1771,8 +1786,6 @@ def ignore(self, other: "ParserElement") -> "ParserElement": patt.parse_string('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] """ - import typing - if isinstance(other, str_type): other = Suppress(other) @@ -1880,11 +1893,14 @@ def set_name(self, name: str) -> "ParserElement": Example:: - Word(nums).parse_string("ABC") # -> Exception: Expected W:(0-9) (at char 0), (line:1, col:1) - Word(nums).set_name("integer").parse_string("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + integer = Word(nums) + integer.parse_string("ABC") # -> Exception: Expected W:(0-9) (at char 0), (line:1, col:1) + + integer.set_name("integer") + integer.parse_string("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) """ self.customName = name - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" if __diag__.enable_debug_on_named_expressions: self.set_debug() return self @@ -1950,9 +1966,9 @@ def parse_file( except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clears out pyparsing internal stack trace - raise exc.with_traceback(None) + + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc.with_traceback(None) def __eq__(self, other): if self is other: @@ -2130,6 +2146,7 @@ def run_tests( success = True NL = Literal(r"\n").add_parse_action(replace_with("\n")).ignore(quoted_string) BOM = "\ufeff" + nlstr = "\n" for t in tests: if comment_specified and comment.matches(t, False) or comments and not t: comments.append( @@ -2139,7 +2156,7 @@ def run_tests( if not t: continue out = [ - "\n" + "\n".join(comments) if comments else "", + f"{nlstr}{nlstr.join(comments) if comments else ''}", pyparsing_test.with_line_numbers(t) if with_line_numbers else t, ] comments = [] @@ -2148,9 +2165,9 @@ def run_tests( t = NL.transform_string(t.lstrip(BOM)) result = self.parse_string(t, parse_all=parseAll) except ParseBaseException as pe: - fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" + fatal = "(FATAL) " if isinstance(pe, ParseFatalException) else "" out.append(pe.explain()) - out.append("FAIL: " + str(pe)) + out.append(f"FAIL: {fatal}{pe}") if ParserElement.verbose_stacktrace: out.extend(traceback.format_tb(pe.__traceback__)) success = success and failureTests @@ -2237,91 +2254,42 @@ def create_diagram( show_groups=show_groups, diagram_kwargs=kwargs, ) - if isinstance(output_html, (str, Path)): - with open(output_html, "w", encoding="utf-8") as diag_file: - diag_file.write(railroad_to_html(railroad, embed=embed, **kwargs)) - else: + if not isinstance(output_html, (str, Path)): # we were passed a file-like object, just write to it output_html.write(railroad_to_html(railroad, embed=embed, **kwargs)) + return + + with open(output_html, "w", encoding="utf-8") as diag_file: + diag_file.write(railroad_to_html(railroad, embed=embed, **kwargs)) # Compatibility synonyms # fmt: off - @staticmethod - @replaced_by_pep8(inline_literals_using) - def inlineLiteralsUsing(): ... - - @staticmethod - @replaced_by_pep8(set_default_whitespace_chars) - def setDefaultWhitespaceChars(): ... - - @replaced_by_pep8(set_results_name) - def setResultsName(self): ... - - @replaced_by_pep8(set_break) - def setBreak(self): ... - - @replaced_by_pep8(set_parse_action) - def setParseAction(self): ... - - @replaced_by_pep8(add_parse_action) - def addParseAction(self): ... 
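The widened enable_packrat() annotation earlier in this file's diff (cache_size_limit: Union[int, None]) matches what the method body already does: passing None selects the unbounded memo cache rather than the FIFO cache. Illustrative usage with the public pyparsing distribution::

    import pyparsing as pp

    pp.ParserElement.enable_packrat(cache_size_limit=None)  # unbounded packrat cache
    integer = pp.Word(pp.nums)
    print(integer.parse_string("42"))  # -> ['42']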
- - @replaced_by_pep8(add_condition) - def addCondition(self): ... - - @replaced_by_pep8(set_fail_action) - def setFailAction(self): ... - - @replaced_by_pep8(try_parse) - def tryParse(self): ... - - @staticmethod - @replaced_by_pep8(enable_left_recursion) - def enableLeftRecursion(): ... - - @staticmethod - @replaced_by_pep8(enable_packrat) - def enablePackrat(): ... - - @replaced_by_pep8(parse_string) - def parseString(self): ... - - @replaced_by_pep8(scan_string) - def scanString(self): ... - - @replaced_by_pep8(transform_string) - def transformString(self): ... - - @replaced_by_pep8(search_string) - def searchString(self): ... - - @replaced_by_pep8(ignore_whitespace) - def ignoreWhitespace(self): ... - - @replaced_by_pep8(leave_whitespace) - def leaveWhitespace(self): ... - - @replaced_by_pep8(set_whitespace_chars) - def setWhitespaceChars(self): ... - - @replaced_by_pep8(parse_with_tabs) - def parseWithTabs(self): ... - - @replaced_by_pep8(set_debug_actions) - def setDebugActions(self): ... - - @replaced_by_pep8(set_debug) - def setDebug(self): ... - - @replaced_by_pep8(set_name) - def setName(self): ... - - @replaced_by_pep8(parse_file) - def parseFile(self): ... - - @replaced_by_pep8(run_tests) - def runTests(self): ... - + inlineLiteralsUsing = replaced_by_pep8("inlineLiteralsUsing", inline_literals_using) + setDefaultWhitespaceChars = replaced_by_pep8( + "setDefaultWhitespaceChars", set_default_whitespace_chars + ) + setResultsName = replaced_by_pep8("setResultsName", set_results_name) + setBreak = replaced_by_pep8("setBreak", set_break) + setParseAction = replaced_by_pep8("setParseAction", set_parse_action) + addParseAction = replaced_by_pep8("addParseAction", add_parse_action) + addCondition = replaced_by_pep8("addCondition", add_condition) + setFailAction = replaced_by_pep8("setFailAction", set_fail_action) + tryParse = replaced_by_pep8("tryParse", try_parse) + enableLeftRecursion = replaced_by_pep8("enableLeftRecursion", enable_left_recursion) + enablePackrat = replaced_by_pep8("enablePackrat", enable_packrat) + parseString = replaced_by_pep8("parseString", parse_string) + scanString = replaced_by_pep8("scanString", scan_string) + transformString = replaced_by_pep8("transformString", transform_string) + searchString = replaced_by_pep8("searchString", search_string) + ignoreWhitespace = replaced_by_pep8("ignoreWhitespace", ignore_whitespace) + leaveWhitespace = replaced_by_pep8("leaveWhitespace", leave_whitespace) + setWhitespaceChars = replaced_by_pep8("setWhitespaceChars", set_whitespace_chars) + parseWithTabs = replaced_by_pep8("parseWithTabs", parse_with_tabs) + setDebugActions = replaced_by_pep8("setDebugActions", set_debug_actions) + setDebug = replaced_by_pep8("setDebug", set_debug) + setName = replaced_by_pep8("setName", set_name) + parseFile = replaced_by_pep8("parseFile", parse_file) + runTests = replaced_by_pep8("runTests", run_tests) canParseNext = can_parse_next resetCache = reset_cache defaultName = default_name @@ -2351,7 +2319,7 @@ def must_skip(t): def show_skip(t): if t._skipped.as_list()[-1:] == [""]: t.pop("_skipped") - t["_skipped"] = "missing <" + repr(self.anchor) + ">" + t["_skipped"] = f"missing <{self.anchor!r}>" return ( self.anchor + skipper().add_parse_action(must_skip) @@ -2402,9 +2370,9 @@ class Literal(Token): Example:: - Literal('blah').parse_string('blah') # -> ['blah'] - Literal('blah').parse_string('blahfooblah') # -> ['blah'] - Literal('blah').parse_string('bla') # -> Exception: Expected "blah" + Literal('abc').parse_string('abc') # -> 
['abc'] + Literal('abc').parse_string('abcdef') # -> ['abc'] + Literal('abc').parse_string('ab') # -> Exception: Expected "abc" For case-insensitive matching, use :class:`CaselessLiteral`. @@ -2434,7 +2402,7 @@ def __init__(self, match_string: str = "", *, matchString: str = ""): self.match = match_string self.matchLen = len(match_string) self.firstMatchChar = match_string[:1] - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayReturnEmpty = False self.mayIndexError = False @@ -2548,40 +2516,37 @@ def parseImpl(self, instring, loc, doActions=True): or instring[loc + self.matchLen].upper() not in self.identChars ): return loc + self.matchLen, self.match - else: - # followed by keyword char - errmsg += ", was immediately followed by keyword character" - errloc = loc + self.matchLen - else: - # preceded by keyword char - errmsg += ", keyword was immediately preceded by keyword character" - errloc = loc - 1 - # else no match just raise plain exception - else: - if ( - instring[loc] == self.firstMatchChar - and self.matchLen == 1 - or instring.startswith(self.match, loc) - ): - if loc == 0 or instring[loc - 1] not in self.identChars: - if ( - loc >= len(instring) - self.matchLen - or instring[loc + self.matchLen] not in self.identChars - ): - return loc + self.matchLen, self.match - else: - # followed by keyword char - errmsg += ( - ", keyword was immediately followed by keyword character" - ) - errloc = loc + self.matchLen + # followed by keyword char + errmsg += ", was immediately followed by keyword character" + errloc = loc + self.matchLen else: # preceded by keyword char errmsg += ", keyword was immediately preceded by keyword character" errloc = loc - 1 # else no match just raise plain exception + elif ( + instring[loc] == self.firstMatchChar + and self.matchLen == 1 + or instring.startswith(self.match, loc) + ): + if loc == 0 or instring[loc - 1] not in self.identChars: + if ( + loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen] not in self.identChars + ): + return loc + self.matchLen, self.match + + # followed by keyword char + errmsg += ", keyword was immediately followed by keyword character" + errloc = loc + self.matchLen + else: + # preceded by keyword char + errmsg += ", keyword was immediately preceded by keyword character" + errloc = loc - 1 + # else no match just raise plain exception + raise ParseException(instring, errloc, errmsg, self) @staticmethod @@ -2613,7 +2578,7 @@ def __init__(self, match_string: str = "", *, matchString: str = ""): super().__init__(match_string.upper()) # Preserve the defining literal. 
self.returnString = match_string - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" def parseImpl(self, instring, loc, doActions=True): if instring[loc : loc + self.matchLen].upper() == self.match: @@ -2788,7 +2753,7 @@ class Word(Token): integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) # a word with a leading capital, and zero or more lowercase - capital_word = Word(alphas.upper(), alphas.lower()) + capitalized_word = Word(alphas.upper(), alphas.lower()) # hostnames are alphanumeric, with leading alpha, and '-' hostname = Word(alphas, alphanums + '-') @@ -2865,7 +2830,7 @@ def __init__( self.maxLen = exact self.minLen = exact - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayIndexError = False self.asKeyword = asKeyword if self.asKeyword: @@ -2879,7 +2844,7 @@ def __init__( re_leading_fragment = f"[{_collapse_string_to_ranges(self.initChars)}]" if self.bodyChars == self.initChars: - if max == 0: + if max == 0 and self.minLen == 1: repeat = "+" elif max == 1: repeat = "" @@ -2895,19 +2860,17 @@ def __init__( repeat = "" else: re_body_fragment = f"[{_collapse_string_to_ranges(self.bodyChars)}]" - if max == 0: + if max == 0 and self.minLen == 1: repeat = "*" elif max == 2: repeat = "?" if min <= 1 else "" else: if min != max: - repeat = f"{{{min - 1 if min > 0 else 0},{max - 1}}}" + repeat = f"{{{min - 1 if min > 0 else ''},{max - 1 if max > 0 else ''}}}" else: - repeat = f"{{{min - 1 if min > 0 else 0}}}" + repeat = f"{{{min - 1 if min > 0 else ''}}}" - self.reString = ( - f"{re_leading_fragment}" f"{re_body_fragment}" f"{repeat}" - ) + self.reString = f"{re_leading_fragment}{re_body_fragment}{repeat}" if self.asKeyword: self.reString = rf"\b{self.reString}\b" @@ -2924,10 +2887,11 @@ def _generateDefaultName(self) -> str: def charsAsStr(s): max_repr_len = 16 s = _collapse_string_to_ranges(s, re_escape=False) + if len(s) > max_repr_len: return s[: max_repr_len - 3] + "..." 
- else: - return s + + return s if self.initChars != self.bodyChars: base = f"W:({charsAsStr(self.initChars)}, {charsAsStr(self.bodyChars)})" @@ -2965,14 +2929,11 @@ def parseImpl(self, instring, loc, doActions=True): throwException = True elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars: throwException = True - elif self.asKeyword: - if ( - start > 0 - and instring[start - 1] in bodychars - or loc < instrlen - and instring[loc] in bodychars - ): - throwException = True + elif self.asKeyword and ( + (start > 0 and instring[start - 1] in bodychars) + or (loc < instrlen and instring[loc] in bodychars) + ): + throwException = True if throwException: raise ParseException(instring, loc, self.errmsg, self) @@ -3072,7 +3033,7 @@ def __init__( "Regex may only be constructed with a string or a compiled RE object" ) - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayIndexError = False self.asGroupList = asGroupList self.asMatch = asMatch @@ -3085,11 +3046,11 @@ def __init__( def re(self): if self._re: return self._re - else: - try: - return re.compile(self.pattern, self.flags) - except re.error: - raise ValueError(f"invalid pattern ({self.pattern!r}) passed to Regex") + + try: + return re.compile(self.pattern, self.flags) + except re.error: + raise ValueError(f"invalid pattern ({self.pattern!r}) passed to Regex") @cached_property def re_match(self): @@ -3110,9 +3071,10 @@ def parseImpl(self, instring, loc, doActions=True): loc = result.end() ret = ParseResults(result.group()) d = result.groupdict() - if d: - for k, v in d.items(): - ret[k] = v + + for k, v in d.items(): + ret[k] = v + return loc, ret def parseImplAsGroupList(self, instring, loc, doActions=True): @@ -3204,6 +3166,7 @@ class QuotedString(Token): [['This is the "quote"']] [['This is the quote with "embedded" quotes']] """ + ws_map = dict(((r"\t", "\t"), (r"\n", "\n"), (r"\f", "\f"), (r"\r", "\r"))) def __init__( @@ -3224,148 +3187,164 @@ def __init__( convertWhitespaceEscapes: bool = True, ): super().__init__() - escChar = escChar or esc_char - escQuote = escQuote or esc_quote - unquoteResults = unquoteResults and unquote_results - endQuoteChar = endQuoteChar or end_quote_char - convertWhitespaceEscapes = ( + esc_char = escChar or esc_char + esc_quote = escQuote or esc_quote + unquote_results = unquoteResults and unquote_results + end_quote_char = endQuoteChar or end_quote_char + convert_whitespace_escapes = ( convertWhitespaceEscapes and convert_whitespace_escapes ) quote_char = quoteChar or quote_char - # remove white space from quote chars - wont work anyway + # remove white space from quote chars quote_char = quote_char.strip() if not quote_char: raise ValueError("quote_char cannot be the empty string") - if endQuoteChar is None: - endQuoteChar = quote_char + if end_quote_char is None: + end_quote_char = quote_char else: - endQuoteChar = endQuoteChar.strip() - if not endQuoteChar: + end_quote_char = end_quote_char.strip() + if not end_quote_char: raise ValueError("end_quote_char cannot be the empty string") - self.quoteChar: str = quote_char - self.quoteCharLen: int = len(quote_char) - self.firstQuoteChar: str = quote_char[0] - self.endQuoteChar: str = endQuoteChar - self.endQuoteCharLen: int = len(endQuoteChar) - self.escChar: str = escChar or "" - self.escQuote: str = escQuote or "" - self.unquoteResults: bool = unquoteResults - self.convertWhitespaceEscapes: bool = convertWhitespaceEscapes + self.quote_char: str = quote_char + self.quote_char_len: int = len(quote_char) 
+ self.first_quote_char: str = quote_char[0] + self.end_quote_char: str = end_quote_char + self.end_quote_char_len: int = len(end_quote_char) + self.esc_char: str = esc_char or "" + self.has_esc_char: bool = esc_char is not None + self.esc_quote: str = esc_quote or "" + self.unquote_results: bool = unquote_results + self.convert_whitespace_escapes: bool = convert_whitespace_escapes self.multiline = multiline + self.re_flags = re.RegexFlag(0) - sep = "" - inner_pattern = "" + # fmt: off + # build up re pattern for the content between the quote delimiters + inner_pattern = [] - if escQuote: - inner_pattern += rf"{sep}(?:{re.escape(escQuote)})" - sep = "|" + if esc_quote: + inner_pattern.append(rf"(?:{re.escape(esc_quote)})") - if escChar: - inner_pattern += rf"{sep}(?:{re.escape(escChar)}.)" - sep = "|" - self.escCharReplacePattern = re.escape(escChar) + "(.)" + if esc_char: + inner_pattern.append(rf"(?:{re.escape(esc_char)}.)") - if len(self.endQuoteChar) > 1: - inner_pattern += ( - f"{sep}(?:" + if len(self.end_quote_char) > 1: + inner_pattern.append( + "(?:" + "|".join( - f"(?:{re.escape(self.endQuoteChar[:i])}(?!{re.escape(self.endQuoteChar[i:])}))" - for i in range(len(self.endQuoteChar) - 1, 0, -1) + f"(?:{re.escape(self.end_quote_char[:i])}(?!{re.escape(self.end_quote_char[i:])}))" + for i in range(len(self.end_quote_char) - 1, 0, -1) ) + ")" ) - sep = "|" - - self.flags = re.RegexFlag(0) - if multiline: - self.flags = re.MULTILINE | re.DOTALL - inner_pattern += ( - rf"{sep}(?:[^{_escape_regex_range_chars(self.endQuoteChar[0])}" - rf"{(_escape_regex_range_chars(escChar) if escChar is not None else '')}])" + if self.multiline: + self.re_flags |= re.MULTILINE | re.DOTALL + inner_pattern.append( + rf"(?:[^{_escape_regex_range_chars(self.end_quote_char[0])}" + rf"{(_escape_regex_range_chars(esc_char) if self.has_esc_char else '')}])" ) else: - inner_pattern += ( - rf"{sep}(?:[^{_escape_regex_range_chars(self.endQuoteChar[0])}\n\r" - rf"{(_escape_regex_range_chars(escChar) if escChar is not None else '')}])" + inner_pattern.append( + rf"(?:[^{_escape_regex_range_chars(self.end_quote_char[0])}\n\r" + rf"{(_escape_regex_range_chars(esc_char) if self.has_esc_char else '')}])" ) self.pattern = "".join( [ - re.escape(self.quoteChar), + re.escape(self.quote_char), "(?:", - inner_pattern, + '|'.join(inner_pattern), ")*", - re.escape(self.endQuoteChar), + re.escape(self.end_quote_char), ] ) - if self.unquoteResults: - if self.convertWhitespaceEscapes: + if self.unquote_results: + if self.convert_whitespace_escapes: self.unquote_scan_re = re.compile( - rf"({'|'.join(re.escape(k) for k in self.ws_map)})|({re.escape(self.escChar)}.)|(\n|.)", - flags=self.flags, + rf"({'|'.join(re.escape(k) for k in self.ws_map)})" + rf"|({re.escape(self.esc_char)}.)" + rf"|(\n|.)", + flags=self.re_flags, ) else: self.unquote_scan_re = re.compile( - rf"({re.escape(self.escChar)}.)|(\n|.)", flags=self.flags + rf"({re.escape(self.esc_char)}.)" + rf"|(\n|.)", + flags=self.re_flags ) + # fmt: on try: - self.re = re.compile(self.pattern, self.flags) + self.re = re.compile(self.pattern, self.re_flags) self.reString = self.pattern self.re_match = self.re.match except re.error: raise ValueError(f"invalid pattern {self.pattern!r} passed to Regex") - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayIndexError = False self.mayReturnEmpty = True def _generateDefaultName(self) -> str: - if self.quoteChar == self.endQuoteChar and isinstance(self.quoteChar, str_type): - return f"string enclosed 
in {self.quoteChar!r}" + if self.quote_char == self.end_quote_char and isinstance( + self.quote_char, str_type + ): + return f"string enclosed in {self.quote_char!r}" - return f"quoted string, starting with {self.quoteChar} ending with {self.endQuoteChar}" + return f"quoted string, starting with {self.quote_char} ending with {self.end_quote_char}" def parseImpl(self, instring, loc, doActions=True): + # check first character of opening quote to see if that is a match + # before doing the more complicated regex match result = ( - instring[loc] == self.firstQuoteChar + instring[loc] == self.first_quote_char and self.re_match(instring, loc) or None ) if not result: raise ParseException(instring, loc, self.errmsg, self) + # get ending loc and matched string from regex matching result loc = result.end() ret = result.group() - if self.unquoteResults: + if self.unquote_results: # strip off quotes - ret = ret[self.quoteCharLen : -self.endQuoteCharLen] + ret = ret[self.quote_char_len : -self.end_quote_char_len] if isinstance(ret, str_type): - if self.convertWhitespaceEscapes: + # fmt: off + if self.convert_whitespace_escapes: + # as we iterate over matches in the input string, + # collect from whichever match group of the unquote_scan_re + # regex matches (only 1 group will match at any given time) ret = "".join( - self.ws_map[match.group(1)] - if match.group(1) - else match.group(2)[-1] - if match.group(2) + # match group 1 matches \t, \n, etc. + self.ws_map[match.group(1)] if match.group(1) + # match group 2 matches escaped characters + else match.group(2)[-1] if match.group(2) + # match group 3 matches any character else match.group(3) for match in self.unquote_scan_re.finditer(ret) ) else: ret = "".join( - match.group(1)[-1] if match.group(1) else match.group(2) + # match group 1 matches escaped characters + match.group(1)[-1] if match.group(1) + # match group 2 matches any character + else match.group(2) for match in self.unquote_scan_re.finditer(ret) ) + # fmt: on # replace escaped quotes - if self.escQuote: - ret = ret.replace(self.escQuote, self.endQuoteChar) + if self.esc_quote: + ret = ret.replace(self.esc_quote, self.end_quote_char) return loc, ret @@ -3407,8 +3386,8 @@ def __init__( if min < 1: raise ValueError( - "cannot specify a minimum length < 1; use " - "Opt(CharsNotIn()) if zero-length char group is permitted" + "cannot specify a minimum length < 1; use" + " Opt(CharsNotIn()) if zero-length char group is permitted" ) self.minLen = min @@ -3422,7 +3401,7 @@ def __init__( self.maxLen = exact self.minLen = exact - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayReturnEmpty = self.minLen == 0 self.mayIndexError = False @@ -3495,7 +3474,7 @@ def __init__(self, ws: str = " \t\r\n", min: int = 1, max: int = 0, exact: int = ) # self.leave_whitespace() self.mayReturnEmpty = True - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.minLen = min @@ -3544,16 +3523,19 @@ def __init__(self, colno: int): self.col = colno def preParse(self, instring: str, loc: int) -> int: - if col(loc, instring) != self.col: - instrlen = len(instring) - if self.ignoreExprs: - loc = self._skipIgnorables(instring, loc) - while ( - loc < instrlen - and instring[loc].isspace() - and col(loc, instring) != self.col - ): - loc += 1 + if col(loc, instring) == self.col: + return loc + + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables(instring, loc) + while ( + loc < instrlen + and instring[loc].isspace() + and col(loc, 
instring) != self.col + ): + loc += 1 + return loc def parseImpl(self, instring, loc, doActions=True): @@ -3599,12 +3581,14 @@ def __init__(self): def preParse(self, instring: str, loc: int) -> int: if loc == 0: return loc - else: - ret = self.skipper.preParse(instring, loc) - if "\n" in self.orig_whiteChars: - while instring[ret : ret + 1] == "\n": - ret = self.skipper.preParse(instring, ret + 1) - return ret + + ret = self.skipper.preParse(instring, loc) + + if "\n" in self.orig_whiteChars: + while instring[ret : ret + 1] == "\n": + ret = self.skipper.preParse(instring, ret + 1) + + return ret def parseImpl(self, instring, loc, doActions=True): if col(loc, instring) == 1: @@ -3645,10 +3629,10 @@ def __init__(self): self.errmsg = "Expected start of text" def parseImpl(self, instring, loc, doActions=True): - if loc != 0: - # see if entire string up to here is just whitespace and ignoreables - if loc != self.preParse(instring, 0): - raise ParseException(instring, loc, self.errmsg, self) + # see if entire string up to here is just whitespace and ignoreables + if loc != 0 and loc != self.preParse(instring, 0): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] @@ -3664,12 +3648,12 @@ def __init__(self): def parseImpl(self, instring, loc, doActions=True): if loc < len(instring): raise ParseException(instring, loc, self.errmsg, self) - elif loc == len(instring): + if loc == len(instring): return loc + 1, [] - elif loc > len(instring): + if loc > len(instring): return loc, [] - else: - raise ParseException(instring, loc, self.errmsg, self) + + raise ParseException(instring, loc, self.errmsg, self) class WordStart(PositionToken): @@ -3802,7 +3786,7 @@ def ignore(self, other) -> ParserElement: return self def _generateDefaultName(self) -> str: - return f"{self.__class__.__name__}:({str(self.exprs)})" + return f"{type(self).__name__}:({self.exprs})" def streamline(self) -> ParserElement: if self.streamlined: @@ -3841,7 +3825,7 @@ def streamline(self) -> ParserElement: self.mayReturnEmpty |= other.mayReturnEmpty self.mayIndexError |= other.mayIndexError - self.errmsg = "Expected " + str(self) + self.errmsg = f"Expected {self}" return self @@ -3863,38 +3847,36 @@ def copy(self) -> ParserElement: return ret def _setResultsName(self, name, listAllMatches=False): - if ( + if not ( __diag__.warn_ungrouped_named_tokens_in_collection and Diagnostics.warn_ungrouped_named_tokens_in_collection not in self.suppress_warnings_ ): - for e in self.exprs: - if ( - isinstance(e, ParserElement) - and e.resultsName - and Diagnostics.warn_ungrouped_named_tokens_in_collection + return super()._setResultsName(name, listAllMatches) + + for e in self.exprs: + if ( + isinstance(e, ParserElement) + and e.resultsName + and ( + Diagnostics.warn_ungrouped_named_tokens_in_collection not in e.suppress_warnings_ - ): - warnings.warn( - "{}: setting results name {!r} on {} expression " - "collides with {!r} on contained expression".format( - "warn_ungrouped_named_tokens_in_collection", - name, - type(self).__name__, - e.resultsName, - ), - stacklevel=3, - ) + ) + ): + warning = ( + "warn_ungrouped_named_tokens_in_collection:" + f" setting results name {name!r} on {type(self).__name__} expression" + f" collides with {e.resultsName!r} on contained expression" + ) + warnings.warn(warning, stacklevel=3) + break return super()._setResultsName(name, listAllMatches) # Compatibility synonyms # fmt: off - @replaced_by_pep8(leave_whitespace) - def leaveWhitespace(self): ... 
- - @replaced_by_pep8(ignore_whitespace) - def ignoreWhitespace(self): ... + leaveWhitespace = replaced_by_pep8("leaveWhitespace", leave_whitespace) + ignoreWhitespace = replaced_by_pep8("ignoreWhitespace", ignore_whitespace) # fmt: on @@ -3931,18 +3913,18 @@ def __init__( if exprs and Ellipsis in exprs: tmp = [] for i, expr in enumerate(exprs): - if expr is Ellipsis: - if i < len(exprs) - 1: - skipto_arg: ParserElement = typing.cast( - ParseExpression, (Empty() + exprs[i + 1]) - ).exprs[-1] - tmp.append(SkipTo(skipto_arg)("_skipped*")) - else: - raise Exception( - "cannot construct And with sequence ending in ..." - ) - else: + if expr is not Ellipsis: tmp.append(expr) + continue + + if i < len(exprs) - 1: + skipto_arg: ParserElement = typing.cast( + ParseExpression, (Empty() + exprs[i + 1]) + ).exprs[-1] + tmp.append(SkipTo(skipto_arg)("_skipped*")) + continue + + raise Exception("cannot construct And with sequence ending in ...") exprs[:] = tmp super().__init__(exprs, savelist) if self.exprs: @@ -3961,25 +3943,24 @@ def __init__( def streamline(self) -> ParserElement: # collapse any _PendingSkip's - if self.exprs: - if any( - isinstance(e, ParseExpression) - and e.exprs - and isinstance(e.exprs[-1], _PendingSkip) - for e in self.exprs[:-1] - ): - deleted_expr_marker = NoMatch() - for i, e in enumerate(self.exprs[:-1]): - if e is deleted_expr_marker: - continue - if ( - isinstance(e, ParseExpression) - and e.exprs - and isinstance(e.exprs[-1], _PendingSkip) - ): - e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] - self.exprs[i + 1] = deleted_expr_marker - self.exprs = [e for e in self.exprs if e is not deleted_expr_marker] + if self.exprs and any( + isinstance(e, ParseExpression) + and e.exprs + and isinstance(e.exprs[-1], _PendingSkip) + for e in self.exprs[:-1] + ): + deleted_expr_marker = NoMatch() + for i, e in enumerate(self.exprs[:-1]): + if e is deleted_expr_marker: + continue + if ( + isinstance(e, ParseExpression) + and e.exprs + and isinstance(e.exprs[-1], _PendingSkip) + ): + e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] + self.exprs[i + 1] = deleted_expr_marker + self.exprs = [e for e in self.exprs if e is not deleted_expr_marker] super().streamline() @@ -4058,7 +4039,7 @@ def _generateDefaultName(self) -> str: # strip off redundant inner {}'s while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": inner = inner[1:-1] - return "{" + inner + "}" + return f"{{{inner}}}" class Or(ParseExpression): @@ -4179,10 +4160,8 @@ def parseImpl(self, instring, loc, doActions=True): if maxExcLoc == loc: maxException.msg = self.errmsg raise maxException - else: - raise ParseException( - instring, loc, "no defined alternatives to match", self - ) + + raise ParseException(instring, loc, "no defined alternatives to match", self) def __ixor__(self, other): if isinstance(other, str_type): @@ -4192,7 +4171,7 @@ def __ixor__(self, other): return self.append(other) # Or([self, other]) def _generateDefaultName(self) -> str: - return "{" + " ^ ".join(str(e) for e in self.exprs) + "}" + return f"{{{' ^ '.join(str(e) for e in self.exprs)}}}" def _setResultsName(self, name, listAllMatches=False): if ( @@ -4206,17 +4185,14 @@ def _setResultsName(self, name, listAllMatches=False): not in e.suppress_warnings_ for e in self.exprs ): - warnings.warn( - "{}: setting results name {!r} on {} expression " - "will return a list of all parsed tokens in an And alternative, " - "in prior versions only the first token was returned; enclose " - "contained argument in Group".format( - 
"warn_multiple_tokens_in_named_alternation", - name, - type(self).__name__, - ), - stacklevel=3, + warning = ( + "warn_multiple_tokens_in_named_alternation:" + f" setting results name {name!r} on {type(self).__name__} expression" + " will return a list of all parsed tokens in an And alternative," + " in prior versions only the first token was returned; enclose" + " contained argument in Group" ) + warnings.warn(warning, stacklevel=3) return super()._setResultsName(name, listAllMatches) @@ -4269,11 +4245,7 @@ def parseImpl(self, instring, loc, doActions=True): for e in self.exprs: try: - return e._parse( - instring, - loc, - doActions, - ) + return e._parse(instring, loc, doActions) except ParseFatalException as pfe: pfe.__traceback__ = None pfe.parser_element = e @@ -4295,10 +4267,8 @@ def parseImpl(self, instring, loc, doActions=True): if maxExcLoc == loc: maxException.msg = self.errmsg raise maxException - else: - raise ParseException( - instring, loc, "no defined alternatives to match", self - ) + + raise ParseException(instring, loc, "no defined alternatives to match", self) def __ior__(self, other): if isinstance(other, str_type): @@ -4308,7 +4278,7 @@ def __ior__(self, other): return self.append(other) # MatchFirst([self, other]) def _generateDefaultName(self) -> str: - return "{" + " | ".join(str(e) for e in self.exprs) + "}" + return f"{{{' | '.join(str(e) for e in self.exprs)}}}" def _setResultsName(self, name, listAllMatches=False): if ( @@ -4322,17 +4292,14 @@ def _setResultsName(self, name, listAllMatches=False): not in e.suppress_warnings_ for e in self.exprs ): - warnings.warn( - "{}: setting results name {!r} on {} expression " - "will return a list of all parsed tokens in an And alternative, " - "in prior versions only the first token was returned; enclose " - "contained argument in Group".format( - "warn_multiple_tokens_in_named_alternation", - name, - type(self).__name__, - ), - stacklevel=3, + warning = ( + "warn_multiple_tokens_in_named_alternation:" + f" setting results name {name!r} on {type(self).__name__} expression" + " will return a list of all parsed tokens in an And alternative," + " in prior versions only the first token was returned; enclose" + " contained argument in Group" ) + warnings.warn(warning, stacklevel=3) return super()._setResultsName(name, listAllMatches) @@ -4508,7 +4475,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, total_results def _generateDefaultName(self) -> str: - return "{" + " & ".join(str(e) for e in self.exprs) + "}" + return f"{{{' & '.join(str(e) for e in self.exprs)}}}" class ParseElementEnhance(ParserElement): @@ -4543,15 +4510,17 @@ def recurse(self) -> List[ParserElement]: return [self.expr] if self.expr is not None else [] def parseImpl(self, instring, loc, doActions=True): - if self.expr is not None: - try: - return self.expr._parse(instring, loc, doActions, callPreParse=False) - except ParseBaseException as pbe: - pbe.msg = self.errmsg - raise - else: + if self.expr is None: raise ParseException(instring, loc, "No expression defined", self) + try: + return self.expr._parse(instring, loc, doActions, callPreParse=False) + except ParseBaseException as pbe: + if not isinstance(self, Forward) or self.customName is not None: + if self.errmsg: + pbe.msg = self.errmsg + raise + def leave_whitespace(self, recursive: bool = True) -> ParserElement: super().leave_whitespace(recursive) @@ -4571,15 +4540,11 @@ def ignore_whitespace(self, recursive: bool = True) -> ParserElement: return self def ignore(self, other) -> 
ParserElement: - if isinstance(other, Suppress): - if other not in self.ignoreExprs: - super().ignore(other) - if self.expr is not None: - self.expr.ignore(self.ignoreExprs[-1]) - else: + if not isinstance(other, Suppress) or other not in self.ignoreExprs: super().ignore(other) if self.expr is not None: self.expr.ignore(self.ignoreExprs[-1]) + return self def streamline(self) -> ParserElement: @@ -4609,15 +4574,12 @@ def validate(self, validateTrace=None) -> None: self._checkRecursion([]) def _generateDefaultName(self) -> str: - return f"{self.__class__.__name__}:({str(self.expr)})" + return f"{type(self).__name__}:({self.expr})" # Compatibility synonyms # fmt: off - @replaced_by_pep8(leave_whitespace) - def leaveWhitespace(self): ... - - @replaced_by_pep8(ignore_whitespace) - def ignoreWhitespace(self): ... + leaveWhitespace = replaced_by_pep8("leaveWhitespace", leave_whitespace) + ignoreWhitespace = replaced_by_pep8("ignoreWhitespace", ignore_whitespace) # fmt: on @@ -4827,7 +4789,7 @@ def __init__( retreat = 0 self.exact = True self.retreat = retreat - self.errmsg = "not preceded by " + str(expr) + self.errmsg = f"not preceded by {expr}" self.skipWhitespace = False self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) @@ -4837,23 +4799,24 @@ def parseImpl(self, instring, loc=0, doActions=True): raise ParseException(instring, loc, self.errmsg) start = loc - self.retreat _, ret = self.expr._parse(instring, start) - else: - # retreat specified a maximum lookbehind window, iterate - test_expr = self.expr + StringEnd() - instring_slice = instring[max(0, loc - self.retreat) : loc] - last_expr = ParseException(instring, loc, self.errmsg) - for offset in range(1, min(loc, self.retreat + 1) + 1): - try: - # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) - _, ret = test_expr._parse( - instring_slice, len(instring_slice) - offset - ) - except ParseBaseException as pbe: - last_expr = pbe - else: - break + return loc, ret + + # retreat specified a maximum lookbehind window, iterate + test_expr = self.expr + StringEnd() + instring_slice = instring[max(0, loc - self.retreat) : loc] + last_expr = ParseException(instring, loc, self.errmsg) + + for offset in range(1, min(loc, self.retreat + 1) + 1): + try: + # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) + except ParseBaseException as pbe: + last_expr = pbe else: - raise last_expr + break + else: + raise last_expr + return loc, ret @@ -4931,7 +4894,7 @@ def __init__(self, expr: Union[ParserElement, str]): self.skipWhitespace = False self.mayReturnEmpty = True - self.errmsg = "Found unwanted token, " + str(self.expr) + self.errmsg = f"Found unwanted token, {self.expr}" def parseImpl(self, instring, loc, doActions=True): if self.expr.can_parse_next(instring, loc, do_actions=doActions): @@ -4939,7 +4902,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, [] def _generateDefaultName(self) -> str: - return "~{" + str(self.expr) + "}" + return f"~{{{self.expr}}}" class _MultipleMatch(ParseElementEnhance): @@ -5002,19 +4965,18 @@ def _setResultsName(self, name, listAllMatches=False): if ( isinstance(e, ParserElement) and e.resultsName - and Diagnostics.warn_ungrouped_named_tokens_in_collection - not in e.suppress_warnings_ + and ( + Diagnostics.warn_ungrouped_named_tokens_in_collection + not in e.suppress_warnings_ + ) ): - warnings.warn( - "{}: setting results name {!r} on {} expression " - 
"collides with {!r} on contained expression".format( - "warn_ungrouped_named_tokens_in_collection", - name, - type(self).__name__, - e.resultsName, - ), - stacklevel=3, + warning = ( + "warn_ungrouped_named_tokens_in_collection:" + f" setting results name {name!r} on {type(self).__name__} expression" + f" collides with {e.resultsName!r} on contained expression" ) + warnings.warn(warning, stacklevel=3) + break return super()._setResultsName(name, listAllMatches) @@ -5048,7 +5010,7 @@ class OneOrMore(_MultipleMatch): """ def _generateDefaultName(self) -> str: - return "{" + str(self.expr) + "}..." + return f"{{{self.expr}}}..." class ZeroOrMore(_MultipleMatch): @@ -5082,7 +5044,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, ParseResults([], name=self.resultsName) def _generateDefaultName(self) -> str: - return "[" + str(self.expr) + "]..." + return f"[{self.expr}]..." class DelimitedList(ParseElementEnhance): @@ -5117,12 +5079,11 @@ def __init__( expr = ParserElement._literalStringClass(expr) expr = typing.cast(ParserElement, expr) - if min is not None: - if min < 1: - raise ValueError("min must be greater than 0") - if max is not None: - if min is not None and max < min: - raise ValueError("max must be greater than, or equal to min") + if min is not None and min < 1: + raise ValueError("min must be greater than 0") + + if max is not None and min is not None and max < min: + raise ValueError("max must be greater than, or equal to min") self.content = expr self.raw_delim = str(delim) @@ -5147,7 +5108,8 @@ def __init__( super().__init__(delim_list_expr, savelist=True) def _generateDefaultName(self) -> str: - return "{0} [{1} {0}]...".format(self.content.streamline(), self.raw_delim) + content_expr = self.content.streamline() + return f"{content_expr} [{self.raw_delim} {content_expr}]..." 
class _NullToken: @@ -5229,7 +5191,7 @@ def _generateDefaultName(self) -> str: # strip off redundant inner {}'s while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": inner = inner[1:-1] - return "[" + inner + "]" + return f"[{inner}]" Optional = Opt @@ -5308,8 +5270,7 @@ def __init__( ): super().__init__(other) failOn = failOn or fail_on - if ignore is not None: - self.ignore(ignore) + self.ignoreExpr = ignore self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include @@ -5319,6 +5280,20 @@ def __init__( else: self.failOn = failOn self.errmsg = "No match found for " + str(self.expr) + self.ignorer = Empty().leave_whitespace() + self._update_ignorer() + + def _update_ignorer(self): + # rebuild internal ignore expr from current ignore exprs and assigned ignoreExpr + self.ignorer.ignoreExprs.clear() + for e in self.expr.ignoreExprs: + self.ignorer.ignore(e) + if self.ignoreExpr: + self.ignorer.ignore(self.ignoreExpr) + + def ignore(self, expr): + super().ignore(expr) + self._update_ignorer() def parseImpl(self, instring, loc, doActions=True): startloc = loc @@ -5327,7 +5302,7 @@ def parseImpl(self, instring, loc, doActions=True): self_failOn_canParseNext = ( self.failOn.canParseNext if self.failOn is not None else None ) - self_preParse = self.preParse if self.callPreparse else None + ignorer_try_parse = self.ignorer.try_parse if self.ignorer.ignoreExprs else None tmploc = loc while tmploc <= instrlen: @@ -5336,9 +5311,18 @@ def parseImpl(self, instring, loc, doActions=True): if self_failOn_canParseNext(instring, tmploc): break - if self_preParse is not None: - # skip grammar-ignored expressions - tmploc = self_preParse(instring, tmploc) + if ignorer_try_parse is not None: + # advance past ignore expressions + prev_tmploc = tmploc + while 1: + try: + tmploc = ignorer_try_parse(instring, tmploc) + except ParseBaseException: + break + # see if all ignorers matched, but didn't actually ignore anything + if tmploc == prev_tmploc: + break + prev_tmploc = tmploc try: self_expr_parse(instring, tmploc, doActions=False, callPreParse=False) @@ -5542,14 +5526,13 @@ def parseImpl(self, instring, loc, doActions=True): del memo[peek_key] return prev_loc, prev_peek.copy() # the match did get better: see if we can improve further - else: - if doActions: - try: - memo[act_key] = super().parseImpl(instring, loc, True) - except ParseException as e: - memo[peek_key] = memo[act_key] = (new_loc, e) - raise - prev_loc, prev_peek = memo[peek_key] = new_loc, new_peek + if doActions: + try: + memo[act_key] = super().parseImpl(instring, loc, True) + except ParseException as e: + memo[peek_key] = memo[act_key] = (new_loc, e) + raise + prev_loc, prev_peek = memo[peek_key] = new_loc, new_peek def leave_whitespace(self, recursive: bool = True) -> ParserElement: self.skipWhitespace = False @@ -5593,7 +5576,7 @@ def _generateDefaultName(self) -> str: else: retString = "None" finally: - return self.__class__.__name__ + ": " + retString + return f"{type(self).__name__}: {retString}" def copy(self) -> ParserElement: if self.expr is not None: @@ -5604,29 +5587,26 @@ def copy(self) -> ParserElement: return ret def _setResultsName(self, name, list_all_matches=False): + # fmt: off if ( __diag__.warn_name_set_on_empty_Forward - and Diagnostics.warn_name_set_on_empty_Forward - not in self.suppress_warnings_ + and Diagnostics.warn_name_set_on_empty_Forward not in self.suppress_warnings_ + and self.expr is None ): - if self.expr is None: - warnings.warn( - "{}: setting results name {!r} on {} expression " 
- "that has no contained expression".format( - "warn_name_set_on_empty_Forward", name, type(self).__name__ - ), - stacklevel=3, - ) + warning = ( + "warn_name_set_on_empty_Forward:" + f" setting results name {name!r} on {type(self).__name__} expression" + " that has no contained expression" + ) + warnings.warn(warning, stacklevel=3) + # fmt: on return super()._setResultsName(name, list_all_matches) # Compatibility synonyms # fmt: off - @replaced_by_pep8(leave_whitespace) - def leaveWhitespace(self): ... - - @replaced_by_pep8(ignore_whitespace) - def ignoreWhitespace(self): ... + leaveWhitespace = replaced_by_pep8("leaveWhitespace", leave_whitespace) + ignoreWhitespace = replaced_by_pep8("ignoreWhitespace", ignore_whitespace) # fmt: on @@ -5730,8 +5710,8 @@ def postParse(self, instring, loc, tokenlist): if isinstance(tokenlist, ParseResults) else list(tokenlist) ) - else: - return [tokenlist] + + return [tokenlist] class Dict(TokenConverter): @@ -5817,8 +5797,8 @@ def postParse(self, instring, loc, tokenlist): if self._asPythonDict: return [tokenlist.as_dict()] if self.resultsName else tokenlist.as_dict() - else: - return [tokenlist] if self.resultsName else tokenlist + + return [tokenlist] if self.resultsName else tokenlist class Suppress(TokenConverter): @@ -5860,14 +5840,14 @@ def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): def __add__(self, other) -> "ParserElement": if isinstance(self.expr, _PendingSkip): return Suppress(SkipTo(other)) + other - else: - return super().__add__(other) + + return super().__add__(other) def __sub__(self, other) -> "ParserElement": if isinstance(self.expr, _PendingSkip): return Suppress(SkipTo(other)) - other - else: - return super().__sub__(other) + + return super().__sub__(other) def postParse(self, instring, loc, tokenlist): return [] @@ -5907,7 +5887,7 @@ def z(*paArgs): thisFunc = f.__name__ s, l, t = paArgs[-3:] if len(paArgs) > 3: - thisFunc = paArgs[0].__class__.__name__ + "." + thisFunc + thisFunc = f"{type(paArgs[0]).__name__}.{thisFunc}" sys.stderr.write(f">>entering {thisFunc}(line: {line(l, s)!r}, {l}, {t!r})\n") try: ret = f(*paArgs) @@ -5975,8 +5955,8 @@ def srange(s: str) -> str: - any combination of the above (``'aeiouy'``, ``'a-zA-Z0-9_$'``, etc.) """ - _expanded = ( - lambda p: p + _expanded = lambda p: ( + p if not isinstance(p, ParseResults) else "".join(chr(c) for c in range(ord(p[0]), ord(p[1]) + 1)) ) @@ -6100,16 +6080,8 @@ def autoname_elements() -> None: lineEnd = line_end stringStart = string_start stringEnd = string_end - -@replaced_by_pep8(null_debug_action) -def nullDebugAction(): ... - -@replaced_by_pep8(trace_parse_action) -def traceParseAction(): ... - -@replaced_by_pep8(condition_as_parse_action) -def conditionAsParseAction(): ... - -@replaced_by_pep8(token_map) -def tokenMap(): ... 
+nullDebugAction = replaced_by_pep8("nullDebugAction", null_debug_action) +traceParseAction = replaced_by_pep8("traceParseAction", trace_parse_action) +conditionAsParseAction = replaced_by_pep8("conditionAsParseAction", condition_as_parse_action) +tokenMap = replaced_by_pep8("tokenMap", token_map) # fmt: on diff --git a/src/pip/_vendor/pyparsing/diagram/__init__.py b/src/pip/_vendor/pyparsing/diagram/__init__.py index 83f9018ee93..6074d2bfd0f 100644 --- a/src/pip/_vendor/pyparsing/diagram/__init__.py +++ b/src/pip/_vendor/pyparsing/diagram/__init__.py @@ -473,7 +473,7 @@ def _to_diagram_element( :param show_groups: bool flag indicating whether to show groups using bounding box """ exprs = element.recurse() - name = name_hint or element.customName or element.__class__.__name__ + name = name_hint or element.customName or type(element).__name__ # Python's id() is used to provide a unique identifier for elements el_id = id(element) diff --git a/src/pip/_vendor/pyparsing/exceptions.py b/src/pip/_vendor/pyparsing/exceptions.py index 12219f124ae..1aaea56f54d 100644 --- a/src/pip/_vendor/pyparsing/exceptions.py +++ b/src/pip/_vendor/pyparsing/exceptions.py @@ -14,11 +14,13 @@ from .unicode import pyparsing_unicode as ppu -class ExceptionWordUnicode(ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cyrillic): +class _ExceptionWordUnicodeSet( + ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cyrillic +): pass -_extract_alphanums = _collapse_string_to_ranges(ExceptionWordUnicode.alphanums) +_extract_alphanums = _collapse_string_to_ranges(_ExceptionWordUnicodeSet.alphanums) _exception_word_extractor = re.compile("([" + _extract_alphanums + "]{1,16})|.") @@ -86,41 +88,39 @@ def explain_exception(exc, depth=16): ret.append(" " * (exc.column - 1) + "^") ret.append(f"{type(exc).__name__}: {exc}") - if depth > 0: - callers = inspect.getinnerframes(exc.__traceback__, context=depth) - seen = set() - for i, ff in enumerate(callers[-depth:]): - frm = ff[0] - - f_self = frm.f_locals.get("self", None) - if isinstance(f_self, ParserElement): - if not frm.f_code.co_name.startswith( - ("parseImpl", "_parseNoCache") - ): - continue - if id(f_self) in seen: - continue - seen.add(id(f_self)) - - self_type = type(f_self) - ret.append( - f"{self_type.__module__}.{self_type.__name__} - {f_self}" - ) - - elif f_self is not None: - self_type = type(f_self) - ret.append(f"{self_type.__module__}.{self_type.__name__}") + if depth <= 0: + return "\n".join(ret) - else: - code = frm.f_code - if code.co_name in ("wrapper", ""): - continue + callers = inspect.getinnerframes(exc.__traceback__, context=depth) + seen = set() + for ff in callers[-depth:]: + frm = ff[0] + + f_self = frm.f_locals.get("self", None) + if isinstance(f_self, ParserElement): + if not frm.f_code.co_name.startswith(("parseImpl", "_parseNoCache")): + continue + if id(f_self) in seen: + continue + seen.add(id(f_self)) + + self_type = type(f_self) + ret.append(f"{self_type.__module__}.{self_type.__name__} - {f_self}") + + elif f_self is not None: + self_type = type(f_self) + ret.append(f"{self_type.__module__}.{self_type.__name__}") + + else: + code = frm.f_code + if code.co_name in ("wrapper", ""): + continue - ret.append(code.co_name) + ret.append(code.co_name) - depth -= 1 - if not depth: - break + depth -= 1 + if not depth: + break return "\n".join(ret) @@ -220,8 +220,10 @@ def explain(self, depth=16) -> str: Example:: + # an expression to parse 3 integers expr = pp.Word(pp.nums) * 3 try: + # a failing parse - the third integer is prefixed with "A" 
expr.parse_string("123 456 A789") except pp.ParseException as pe: print(pe.explain(depth=0)) @@ -244,8 +246,7 @@ def explain(self, depth=16) -> str: return self.explain_exception(self, depth) # fmt: off - @replaced_by_pep8(mark_input_line) - def markInputline(self): ... + markInputline = replaced_by_pep8("markInputline", mark_input_line) # fmt: on @@ -255,16 +256,16 @@ class ParseException(ParseBaseException): Example:: + integer = Word(nums).set_name("integer") try: - Word(nums).set_name("integer").parse_string("ABC") + integer.parse_string("ABC") except ParseException as pe: print(pe) - print("column: {}".format(pe.column)) + print(f"column: {pe.column}") prints:: - Expected integer (at char 0), (line:1, col:1) - column: 1 + Expected integer (at char 0), (line:1, col:1) column: 1 """ diff --git a/src/pip/_vendor/pyparsing/helpers.py b/src/pip/_vendor/pyparsing/helpers.py index 018f0d6ac86..dcfdb8fe4bf 100644 --- a/src/pip/_vendor/pyparsing/helpers.py +++ b/src/pip/_vendor/pyparsing/helpers.py @@ -74,7 +74,7 @@ def count_field_parse_action(s, l, t): intExpr = intExpr.copy() intExpr.set_name("arrayLen") intExpr.add_parse_action(count_field_parse_action, call_during_try=True) - return (intExpr + array_expr).set_name("(len) " + str(expr) + "...") + return (intExpr + array_expr).set_name(f"(len) {expr}...") def match_previous_literal(expr: ParserElement) -> ParserElement: @@ -95,15 +95,17 @@ def match_previous_literal(expr: ParserElement) -> ParserElement: rep = Forward() def copy_token_to_repeater(s, l, t): - if t: - if len(t) == 1: - rep << t[0] - else: - # flatten t tokens - tflat = _flatten(t.as_list()) - rep << And(Literal(tt) for tt in tflat) - else: + if not t: rep << Empty() + return + + if len(t) == 1: + rep << t[0] + return + + # flatten t tokens + tflat = _flatten(t.as_list()) + rep << And(Literal(tt) for tt in tflat) expr.add_parse_action(copy_token_to_repeater, callDuringTry=True) rep.set_name("(prev) " + str(expr)) @@ -230,7 +232,7 @@ def one_of( if isequal(other, cur): del symbols[i + j + 1] break - elif masks(cur, other): + if masks(cur, other): del symbols[i + j + 1] symbols.insert(i, other) break @@ -534,7 +536,9 @@ def nested_expr( ) else: ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) - ret.set_name("nested %s%s expression" % (opener, closer)) + ret.set_name(f"nested {opener}{closer} expression") + # don't override error message from content expressions + ret.errmsg = None return ret @@ -580,7 +584,7 @@ def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")) ) closeTag = Combine(Literal("", adjacent=False) - openTag.set_name("<%s>" % resname) + openTag.set_name(f"<{resname}>") # add start results name in parse action now that ungrouped names are not reported at two levels openTag.add_parse_action( lambda t: t.__setitem__( @@ -589,7 +593,7 @@ def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")) ) closeTag = closeTag( "end" + "".join(resname.replace(":", " ").title().split()) - ).set_name("" % resname) + ).set_name(f"") openTag.tag = resname closeTag.tag = resname openTag.tag_body = SkipTo(closeTag()) @@ -777,7 +781,7 @@ def parseImpl(self, instring, loc, doActions=True): rpar = Suppress(rpar) # if lpar and rpar are not suppressed, wrap in group - if not (isinstance(rpar, Suppress) and isinstance(rpar, Suppress)): + if not (isinstance(lpar, Suppress) and isinstance(rpar, Suppress)): lastExpr = base_expr | Group(lpar + ret + rpar) else: lastExpr = base_expr | (lpar + ret + rpar) @@ -787,7 
+791,7 @@ def parseImpl(self, instring, loc, doActions=True): pa: typing.Optional[ParseAction] opExpr1: ParserElement opExpr2: ParserElement - for i, operDef in enumerate(op_list): + for operDef in op_list: opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] # type: ignore[assignment] if isinstance(opExpr, str_type): opExpr = ParserElement._literalStringClass(opExpr) @@ -1058,43 +1062,17 @@ def delimited_list( cppStyleComment = cpp_style_comment javaStyleComment = java_style_comment pythonStyleComment = python_style_comment - -@replaced_by_pep8(DelimitedList) -def delimitedList(): ... - -@replaced_by_pep8(DelimitedList) -def delimited_list(): ... - -@replaced_by_pep8(counted_array) -def countedArray(): ... - -@replaced_by_pep8(match_previous_literal) -def matchPreviousLiteral(): ... - -@replaced_by_pep8(match_previous_expr) -def matchPreviousExpr(): ... - -@replaced_by_pep8(one_of) -def oneOf(): ... - -@replaced_by_pep8(dict_of) -def dictOf(): ... - -@replaced_by_pep8(original_text_for) -def originalTextFor(): ... - -@replaced_by_pep8(nested_expr) -def nestedExpr(): ... - -@replaced_by_pep8(make_html_tags) -def makeHTMLTags(): ... - -@replaced_by_pep8(make_xml_tags) -def makeXMLTags(): ... - -@replaced_by_pep8(replace_html_entity) -def replaceHTMLEntity(): ... - -@replaced_by_pep8(infix_notation) -def infixNotation(): ... +delimitedList = replaced_by_pep8("delimitedList", DelimitedList) +delimited_list = replaced_by_pep8("delimited_list", DelimitedList) +countedArray = replaced_by_pep8("countedArray", counted_array) +matchPreviousLiteral = replaced_by_pep8("matchPreviousLiteral", match_previous_literal) +matchPreviousExpr = replaced_by_pep8("matchPreviousExpr", match_previous_expr) +oneOf = replaced_by_pep8("oneOf", one_of) +dictOf = replaced_by_pep8("dictOf", dict_of) +originalTextFor = replaced_by_pep8("originalTextFor", original_text_for) +nestedExpr = replaced_by_pep8("nestedExpr", nested_expr) +makeHTMLTags = replaced_by_pep8("makeHTMLTags", make_html_tags) +makeXMLTags = replaced_by_pep8("makeXMLTags", make_xml_tags) +replaceHTMLEntity = replaced_by_pep8("replaceHTMLEntity", replace_html_entity) +infixNotation = replaced_by_pep8("infixNotation", infix_notation) # fmt: on diff --git a/src/pip/_vendor/pyparsing/results.py b/src/pip/_vendor/pyparsing/results.py index 0313049763b..3e5fe2089bd 100644 --- a/src/pip/_vendor/pyparsing/results.py +++ b/src/pip/_vendor/pyparsing/results.py @@ -173,42 +173,48 @@ def __init__( ): self._tokdict: Dict[str, _ParseResultsWithOffset] self._modal = modal - if name is not None and name != "": - if isinstance(name, int): - name = str(name) - if not modal: - self._all_names = {name} - self._name = name - if toklist not in self._null_values: - if isinstance(toklist, (str_type, type)): - toklist = [toklist] - if asList: - if isinstance(toklist, ParseResults): - self[name] = _ParseResultsWithOffset( - ParseResults(toklist._toklist), 0 - ) - else: - self[name] = _ParseResultsWithOffset( - ParseResults(toklist[0]), 0 - ) - self[name]._name = name - else: - try: - self[name] = toklist[0] - except (KeyError, TypeError, IndexError): - if toklist is not self: - self[name] = toklist - else: - self._name = name + + if name is None or name == "": + return + + if isinstance(name, int): + name = str(name) + + if not modal: + self._all_names = {name} + + self._name = name + + if toklist in self._null_values: + return + + if isinstance(toklist, (str_type, type)): + toklist = [toklist] + + if asList: + if isinstance(toklist, ParseResults): + self[name] = 
_ParseResultsWithOffset(ParseResults(toklist._toklist), 0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0) + self[name]._name = name + return + + try: + self[name] = toklist[0] + except (KeyError, TypeError, IndexError): + if toklist is not self: + self[name] = toklist + else: + self._name = name def __getitem__(self, i): if isinstance(i, (int, slice)): return self._toklist[i] - else: - if i not in self._all_names: - return self._tokdict[i][-1][0] - else: - return ParseResults([v[0] for v in self._tokdict[i]]) + + if i not in self._all_names: + return self._tokdict[i][-1][0] + + return ParseResults([v[0] for v in self._tokdict[i]]) def __setitem__(self, k, v, isinstance=isinstance): if isinstance(v, _ParseResultsWithOffset): @@ -226,27 +232,28 @@ def __setitem__(self, k, v, isinstance=isinstance): sub._parent = self def __delitem__(self, i): - if isinstance(i, (int, slice)): - mylen = len(self._toklist) - del self._toklist[i] - - # convert int to slice - if isinstance(i, int): - if i < 0: - i += mylen - i = slice(i, i + 1) - # get removed indices - removed = list(range(*i.indices(mylen))) - removed.reverse() - # fixup indices in token dictionary - for name, occurrences in self._tokdict.items(): - for j in removed: - for k, (value, position) in enumerate(occurrences): - occurrences[k] = _ParseResultsWithOffset( - value, position - (position > j) - ) - else: + if not isinstance(i, (int, slice)): del self._tokdict[i] + return + + mylen = len(self._toklist) + del self._toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i + 1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for occurrences in self._tokdict.values(): + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset( + value, position - (position > j) + ) def __contains__(self, k) -> bool: return k in self._tokdict @@ -376,7 +383,7 @@ def insert_locn(locn, tokens): """ self._toklist.insert(index, ins_string) # fixup indices in token dictionary - for name, occurrences in self._tokdict.items(): + for occurrences in self._tokdict.values(): for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset( value, position + (position > index) @@ -652,58 +659,52 @@ def dump(self, indent="", full=True, include_list=True, _depth=0) -> str: NL = "\n" out.append(indent + str(self.as_list()) if include_list else "") - if full: - if self.haskeys(): - items = sorted((str(k), v) for k, v in self.items()) - for k, v in items: - if out: - out.append(NL) - out.append(f"{indent}{(' ' * _depth)}- {k}: ") - if isinstance(v, ParseResults): - if v: - out.append( - v.dump( - indent=indent, - full=full, - include_list=include_list, - _depth=_depth + 1, - ) - ) - else: - out.append(str(v)) - else: - out.append(repr(v)) - if any(isinstance(vv, ParseResults) for vv in self): - v = self - for i, vv in enumerate(v): - if isinstance(vv, ParseResults): - out.append( - "\n{}{}[{}]:\n{}{}{}".format( - indent, - (" " * (_depth)), - i, - indent, - (" " * (_depth + 1)), - vv.dump( - indent=indent, - full=full, - include_list=include_list, - _depth=_depth + 1, - ), - ) - ) - else: - out.append( - "\n%s%s[%d]:\n%s%s%s" - % ( - indent, - (" " * (_depth)), - i, - indent, - (" " * (_depth + 1)), - str(vv), - ) - ) + if not full: + return "".join(out) + + if self.haskeys(): + items = sorted((str(k), v) for k, v in self.items()) + for k, 
v in items: + if out: + out.append(NL) + out.append(f"{indent}{(' ' * _depth)}- {k}: ") + if not isinstance(v, ParseResults): + out.append(repr(v)) + continue + + if not v: + out.append(str(v)) + continue + + out.append( + v.dump( + indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1, + ) + ) + if not any(isinstance(vv, ParseResults) for vv in self): + return "".join(out) + + v = self + incr = " " + nl = "\n" + for i, vv in enumerate(v): + if isinstance(vv, ParseResults): + vv_dump = vv.dump( + indent=indent, + full=full, + include_list=include_list, + _depth=_depth + 1, + ) + out.append( + f"{nl}{indent}{incr * _depth}[{i}]:{nl}{indent}{incr * (_depth + 1)}{vv_dump}" + ) + else: + out.append( + f"{nl}{indent}{incr * _depth}[{i}]:{nl}{indent}{incr * (_depth + 1)}{vv}" + ) return "".join(out) diff --git a/src/pip/_vendor/pyparsing/testing.py b/src/pip/_vendor/pyparsing/testing.py index 6a254c1c5e2..5654d47d62d 100644 --- a/src/pip/_vendor/pyparsing/testing.py +++ b/src/pip/_vendor/pyparsing/testing.py @@ -1,8 +1,10 @@ # testing.py from contextlib import contextmanager +import re import typing + from .core import ( ParserElement, ParseException, @@ -49,23 +51,23 @@ def save(self): self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS - self._save_context[ - "literal_string_class" - ] = ParserElement._literalStringClass + self._save_context["literal_string_class"] = ( + ParserElement._literalStringClass + ) self._save_context["verbose_stacktrace"] = ParserElement.verbose_stacktrace self._save_context["packrat_enabled"] = ParserElement._packratEnabled if ParserElement._packratEnabled: - self._save_context[ - "packrat_cache_size" - ] = ParserElement.packrat_cache.size + self._save_context["packrat_cache_size"] = ( + ParserElement.packrat_cache.size + ) else: self._save_context["packrat_cache_size"] = None self._save_context["packrat_parse"] = ParserElement._parse - self._save_context[ - "recursion_enabled" - ] = ParserElement._left_recursion_enabled + self._save_context["recursion_enabled"] = ( + ParserElement._left_recursion_enabled + ) self._save_context["__diag__"] = { name: getattr(__diag__, name) for name in __diag__._all_names @@ -180,49 +182,52 @@ def assertRunTestResults( """ run_test_success, run_test_results = run_tests_report - if expected_parse_results is not None: - merged = [ - (*rpt, expected) - for rpt, expected in zip(run_test_results, expected_parse_results) - ] - for test_string, result, expected in merged: - # expected should be a tuple containing a list and/or a dict or an exception, - # and optional failure message string - # an empty tuple will skip any result validation - fail_msg = next( - (exp for exp in expected if isinstance(exp, str)), None + if expected_parse_results is None: + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + return + + merged = [ + (*rpt, expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next((exp for exp in expected if isinstance(exp, str)), None) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + 
with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None ) - expected_exception = next( - ( - exp - for exp in expected - if isinstance(exp, type) and issubclass(exp, Exception) - ), - None, + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None ) - if expected_exception is not None: - with self.assertRaises( - expected_exception=expected_exception, msg=fail_msg or msg - ): - if isinstance(result, Exception): - raise result - else: - expected_list = next( - (exp for exp in expected if isinstance(exp, list)), None + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, ) - expected_dict = next( - (exp for exp in expected if isinstance(exp, dict)), None - ) - if (expected_list, expected_dict) != (None, None): - self.assertParseResultsEquals( - result, - expected_list=expected_list, - expected_dict=expected_dict, - msg=fail_msg or msg, - ) - else: - # warning here maybe? - print(f"no validation for {test_string!r}") + else: + # warning here maybe? + print(f"no validation for {test_string!r}") # do this last, in case some specific test results can be reported instead self.assertTrue( @@ -230,9 +235,18 @@ def assertRunTestResults( ) @contextmanager - def assertRaisesParseException(self, exc_type=ParseException, msg=None): - with self.assertRaises(exc_type, msg=msg): - yield + def assertRaisesParseException( + self, exc_type=ParseException, expected_msg=None, msg=None + ): + if expected_msg is not None: + if isinstance(expected_msg, str): + expected_msg = re.escape(expected_msg) + with self.assertRaisesRegex(exc_type, expected_msg, msg=msg) as ctx: + yield ctx + + else: + with self.assertRaises(exc_type, msg=msg) as ctx: + yield ctx @staticmethod def with_line_numbers( diff --git a/src/pip/_vendor/pyparsing/unicode.py b/src/pip/_vendor/pyparsing/unicode.py index ec0b3a4fe60..e0b7b4ccb9a 100644 --- a/src/pip/_vendor/pyparsing/unicode.py +++ b/src/pip/_vendor/pyparsing/unicode.py @@ -102,17 +102,10 @@ def identbodychars(cls): all characters in this range that are valid identifier body characters, plus the digits 0-9, and · (Unicode MIDDLE DOT) """ - return "".join( - sorted( - set( - cls.identchars - + "0123456789·" - + "".join( - [c for c in cls._chars_for_ranges if ("_" + c).isidentifier()] - ) - ) - ) + identifier_chars = set( + c for c in cls._chars_for_ranges if ("_" + c).isidentifier() ) + return "".join(sorted(identifier_chars | set(cls.identchars + "0123456789·"))) @_lazyclassproperty def identifier(cls): diff --git a/src/pip/_vendor/pyparsing/util.py b/src/pip/_vendor/pyparsing/util.py index d8d3f414cca..4ae018a9636 100644 --- a/src/pip/_vendor/pyparsing/util.py +++ b/src/pip/_vendor/pyparsing/util.py @@ -237,7 +237,7 @@ def _flatten(ll: list) -> list: return ret -def _make_synonym_function(compat_name: str, fn: C) -> C: +def replaced_by_pep8(compat_name: str, fn: C) -> C: # In a future version, uncomment the code in the internal _inner() functions # to begin emitting DeprecationWarnings. 
@@ -251,7 +251,7 @@ def _make_synonym_function(compat_name: str, fn: C) -> C: @wraps(fn) def _inner(self, *args, **kwargs): # warnings.warn( - # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=2 # ) return fn(self, *args, **kwargs) @@ -260,7 +260,7 @@ def _inner(self, *args, **kwargs): @wraps(fn) def _inner(*args, **kwargs): # warnings.warn( - # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=2 # ) return fn(*args, **kwargs) @@ -275,10 +275,3 @@ def _inner(*args, **kwargs): _inner.__kwdefaults__ = None _inner.__qualname__ = fn.__qualname__ return cast(C, _inner) - - -def replaced_by_pep8(fn: C) -> Callable[[Callable], C]: - """ - Decorator for pre-PEP8 compatibility synonyms, to link them to the new function. - """ - return lambda other: _make_synonym_function(other.__name__, fn) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index fc60f5cae04..ab207435d0d 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -5,7 +5,7 @@ distro==1.9.0 msgpack==1.0.8 packaging==21.3 platformdirs==4.2.1 -pyparsing==3.1.0 +pyparsing==3.1.2 pyproject-hooks==1.0.0 requests==2.31.0 certifi==2024.2.2 From cba5b13f9b0791de05456d7a13d0c1a0a32e1f95 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 21:03:50 +0100 Subject: [PATCH 4/7] Upgrade idna to 3.7 --- news/idna.vendor.rst | 2 +- src/pip/_vendor/idna/LICENSE.md | 2 +- src/pip/_vendor/idna/core.py | 23 +- src/pip/_vendor/idna/idnadata.py | 2197 +++++++++++++++++++++++++- src/pip/_vendor/idna/package_data.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 6 files changed, 2159 insertions(+), 69 deletions(-) diff --git a/news/idna.vendor.rst b/news/idna.vendor.rst index 229b1f3568a..1b8f7430aa6 100644 --- a/news/idna.vendor.rst +++ b/news/idna.vendor.rst @@ -1 +1 @@ -Upgrade idna to 3.6 +Upgrade idna to 3.7 diff --git a/src/pip/_vendor/idna/LICENSE.md b/src/pip/_vendor/idna/LICENSE.md index ce3670186c6..19b6b45242c 100644 --- a/src/pip/_vendor/idna/LICENSE.md +++ b/src/pip/_vendor/idna/LICENSE.md @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2013-2023, Kim Davies and contributors. +Copyright (c) 2013-2024, Kim Davies and contributors. All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/src/pip/_vendor/idna/core.py b/src/pip/_vendor/idna/core.py index aaf7d658ba0..0dae61acdbc 100644 --- a/src/pip/_vendor/idna/core.py +++ b/src/pip/_vendor/idna/core.py @@ -150,9 +150,11 @@ def valid_contextj(label: str, pos: int) -> bool: joining_type = idnadata.joining_types.get(ord(label[i])) if joining_type == ord('T'): continue - if joining_type in [ord('L'), ord('D')]: + elif joining_type in [ord('L'), ord('D')]: ok = True break + else: + break if not ok: return False @@ -162,9 +164,11 @@ def valid_contextj(label: str, pos: int) -> bool: joining_type = idnadata.joining_types.get(ord(label[i])) if joining_type == ord('T'): continue - if joining_type in [ord('R'), ord('D')]: + elif joining_type in [ord('R'), ord('D')]: ok = True break + else: + break return ok if cp_value == 0x200d: @@ -236,12 +240,8 @@ def check_label(label: Union[str, bytes, bytearray]) -> None: if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): continue elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( - _unot(cp_value), pos+1, repr(label))) - except ValueError: - raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( _unot(cp_value), pos+1, repr(label))) elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): if not valid_contexto(label, pos): @@ -262,13 +262,8 @@ def alabel(label: str) -> bytes: except UnicodeEncodeError: pass - if not label: - raise IDNAError('No Input') - - label = str(label) check_label(label) - label_bytes = _punycode(label) - label_bytes = _alabel_prefix + label_bytes + label_bytes = _alabel_prefix + _punycode(label) if not valid_label_length(label_bytes): raise IDNAError('Label too long') diff --git a/src/pip/_vendor/idna/idnadata.py b/src/pip/_vendor/idna/idnadata.py index 5cd05d9056e..c61dcf977e5 100644 --- a/src/pip/_vendor/idna/idnadata.py +++ b/src/pip/_vendor/idna/idnadata.py @@ -101,16 +101,190 @@ ), } joining_types = { - 0x600: 85, - 0x601: 85, - 0x602: 85, - 0x603: 85, - 0x604: 85, - 0x605: 85, - 0x608: 85, - 0x60b: 85, + 0xad: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30a: 84, + 0x30b: 84, + 0x30c: 84, + 0x30d: 84, + 0x30e: 84, + 0x30f: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31a: 84, + 0x31b: 84, + 0x31c: 84, + 0x31d: 84, + 0x31e: 84, + 0x31f: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32a: 84, + 0x32b: 84, + 0x32c: 84, + 0x32d: 84, + 0x32e: 84, + 0x32f: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33a: 84, + 0x33b: 84, + 0x33c: 84, + 0x33d: 84, + 0x33e: 84, + 0x33f: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34a: 84, + 0x34b: 84, + 0x34c: 84, + 0x34d: 84, + 0x34e: 84, + 0x34f: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 
0x359: 84, + 0x35a: 84, + 0x35b: 84, + 0x35c: 84, + 0x35d: 84, + 0x35e: 84, + 0x35f: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36a: 84, + 0x36b: 84, + 0x36c: 84, + 0x36d: 84, + 0x36e: 84, + 0x36f: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59a: 84, + 0x59b: 84, + 0x59c: 84, + 0x59d: 84, + 0x59e: 84, + 0x59f: 84, + 0x5a0: 84, + 0x5a1: 84, + 0x5a2: 84, + 0x5a3: 84, + 0x5a4: 84, + 0x5a5: 84, + 0x5a6: 84, + 0x5a7: 84, + 0x5a8: 84, + 0x5a9: 84, + 0x5aa: 84, + 0x5ab: 84, + 0x5ac: 84, + 0x5ad: 84, + 0x5ae: 84, + 0x5af: 84, + 0x5b0: 84, + 0x5b1: 84, + 0x5b2: 84, + 0x5b3: 84, + 0x5b4: 84, + 0x5b5: 84, + 0x5b6: 84, + 0x5b7: 84, + 0x5b8: 84, + 0x5b9: 84, + 0x5ba: 84, + 0x5bb: 84, + 0x5bc: 84, + 0x5bd: 84, + 0x5bf: 84, + 0x5c1: 84, + 0x5c2: 84, + 0x5c4: 84, + 0x5c5: 84, + 0x5c7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61a: 84, + 0x61c: 84, 0x620: 68, - 0x621: 85, 0x622: 82, 0x623: 82, 0x624: 82, @@ -152,12 +326,33 @@ 0x648: 82, 0x649: 68, 0x64a: 68, + 0x64b: 84, + 0x64c: 84, + 0x64d: 84, + 0x64e: 84, + 0x64f: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65a: 84, + 0x65b: 84, + 0x65c: 84, + 0x65d: 84, + 0x65e: 84, + 0x65f: 84, 0x66e: 68, 0x66f: 68, + 0x670: 84, 0x671: 82, 0x672: 82, 0x673: 82, - 0x674: 85, 0x675: 82, 0x676: 82, 0x677: 82, @@ -254,7 +449,25 @@ 0x6d2: 82, 0x6d3: 82, 0x6d5: 82, - 0x6dd: 85, + 0x6d6: 84, + 0x6d7: 84, + 0x6d8: 84, + 0x6d9: 84, + 0x6da: 84, + 0x6db: 84, + 0x6dc: 84, + 0x6df: 84, + 0x6e0: 84, + 0x6e1: 84, + 0x6e2: 84, + 0x6e3: 84, + 0x6e4: 84, + 0x6e7: 84, + 0x6e8: 84, + 0x6ea: 84, + 0x6eb: 84, + 0x6ec: 84, + 0x6ed: 84, 0x6ee: 82, 0x6ef: 82, 0x6fa: 68, @@ -263,6 +476,7 @@ 0x6ff: 68, 0x70f: 84, 0x710: 82, + 0x711: 84, 0x712: 68, 0x713: 68, 0x714: 68, @@ -293,6 +507,33 @@ 0x72d: 68, 0x72e: 68, 0x72f: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73a: 84, + 0x73b: 84, + 0x73c: 84, + 0x73d: 84, + 0x73e: 84, + 0x73f: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74a: 84, 0x74d: 82, 0x74e: 68, 0x74f: 68, @@ -344,6 +585,17 @@ 0x77d: 68, 0x77e: 68, 0x77f: 68, + 0x7a6: 84, + 0x7a7: 84, + 0x7a8: 84, + 0x7a9: 84, + 0x7aa: 84, + 0x7ab: 84, + 0x7ac: 84, + 0x7ad: 84, + 0x7ae: 84, + 0x7af: 84, + 0x7b0: 84, 0x7ca: 68, 0x7cb: 68, 0x7cc: 68, @@ -377,7 +629,38 @@ 0x7e8: 68, 0x7e9: 68, 0x7ea: 68, + 0x7eb: 84, + 0x7ec: 84, + 0x7ed: 84, + 0x7ee: 84, + 0x7ef: 84, + 0x7f0: 84, + 0x7f1: 84, + 0x7f2: 84, + 0x7f3: 84, 0x7fa: 67, + 0x7fd: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81b: 84, + 0x81c: 84, + 0x81d: 84, + 0x81e: 84, + 0x81f: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82a: 84, + 0x82b: 84, + 0x82c: 84, + 0x82d: 84, 0x840: 82, 0x841: 68, 0x842: 68, @@ -403,13 +686,14 @@ 0x856: 82, 0x857: 82, 0x858: 82, + 0x859: 84, + 0x85a: 84, + 0x85b: 84, 0x860: 68, - 0x861: 85, 0x862: 68, 0x863: 68, 0x864: 68, 0x865: 68, - 0x866: 85, 0x867: 82, 0x868: 68, 0x869: 82, @@ -437,16 +721,20 @@ 
0x884: 67, 0x885: 67, 0x886: 68, - 0x887: 85, - 0x888: 85, 0x889: 68, 0x88a: 68, 0x88b: 68, 0x88c: 68, 0x88d: 68, 0x88e: 82, - 0x890: 85, - 0x891: 85, + 0x898: 84, + 0x899: 84, + 0x89a: 84, + 0x89b: 84, + 0x89c: 84, + 0x89d: 84, + 0x89e: 84, + 0x89f: 84, 0x8a0: 68, 0x8a1: 68, 0x8a2: 68, @@ -460,7 +748,6 @@ 0x8aa: 82, 0x8ab: 82, 0x8ac: 82, - 0x8ad: 85, 0x8ae: 82, 0x8af: 68, 0x8b0: 68, @@ -488,11 +775,357 @@ 0x8c6: 68, 0x8c7: 68, 0x8c8: 68, - 0x8e2: 85, - 0x1806: 85, + 0x8ca: 84, + 0x8cb: 84, + 0x8cc: 84, + 0x8cd: 84, + 0x8ce: 84, + 0x8cf: 84, + 0x8d0: 84, + 0x8d1: 84, + 0x8d2: 84, + 0x8d3: 84, + 0x8d4: 84, + 0x8d5: 84, + 0x8d6: 84, + 0x8d7: 84, + 0x8d8: 84, + 0x8d9: 84, + 0x8da: 84, + 0x8db: 84, + 0x8dc: 84, + 0x8dd: 84, + 0x8de: 84, + 0x8df: 84, + 0x8e0: 84, + 0x8e1: 84, + 0x8e3: 84, + 0x8e4: 84, + 0x8e5: 84, + 0x8e6: 84, + 0x8e7: 84, + 0x8e8: 84, + 0x8e9: 84, + 0x8ea: 84, + 0x8eb: 84, + 0x8ec: 84, + 0x8ed: 84, + 0x8ee: 84, + 0x8ef: 84, + 0x8f0: 84, + 0x8f1: 84, + 0x8f2: 84, + 0x8f3: 84, + 0x8f4: 84, + 0x8f5: 84, + 0x8f6: 84, + 0x8f7: 84, + 0x8f8: 84, + 0x8f9: 84, + 0x8fa: 84, + 0x8fb: 84, + 0x8fc: 84, + 0x8fd: 84, + 0x8fe: 84, + 0x8ff: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93a: 84, + 0x93c: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94d: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9bc: 84, + 0x9c1: 84, + 0x9c2: 84, + 0x9c3: 84, + 0x9c4: 84, + 0x9cd: 84, + 0x9e2: 84, + 0x9e3: 84, + 0x9fe: 84, + 0xa01: 84, + 0xa02: 84, + 0xa3c: 84, + 0xa41: 84, + 0xa42: 84, + 0xa47: 84, + 0xa48: 84, + 0xa4b: 84, + 0xa4c: 84, + 0xa4d: 84, + 0xa51: 84, + 0xa70: 84, + 0xa71: 84, + 0xa75: 84, + 0xa81: 84, + 0xa82: 84, + 0xabc: 84, + 0xac1: 84, + 0xac2: 84, + 0xac3: 84, + 0xac4: 84, + 0xac5: 84, + 0xac7: 84, + 0xac8: 84, + 0xacd: 84, + 0xae2: 84, + 0xae3: 84, + 0xafa: 84, + 0xafb: 84, + 0xafc: 84, + 0xafd: 84, + 0xafe: 84, + 0xaff: 84, + 0xb01: 84, + 0xb3c: 84, + 0xb3f: 84, + 0xb41: 84, + 0xb42: 84, + 0xb43: 84, + 0xb44: 84, + 0xb4d: 84, + 0xb55: 84, + 0xb56: 84, + 0xb62: 84, + 0xb63: 84, + 0xb82: 84, + 0xbc0: 84, + 0xbcd: 84, + 0xc00: 84, + 0xc04: 84, + 0xc3c: 84, + 0xc3e: 84, + 0xc3f: 84, + 0xc40: 84, + 0xc46: 84, + 0xc47: 84, + 0xc48: 84, + 0xc4a: 84, + 0xc4b: 84, + 0xc4c: 84, + 0xc4d: 84, + 0xc55: 84, + 0xc56: 84, + 0xc62: 84, + 0xc63: 84, + 0xc81: 84, + 0xcbc: 84, + 0xcbf: 84, + 0xcc6: 84, + 0xccc: 84, + 0xccd: 84, + 0xce2: 84, + 0xce3: 84, + 0xd00: 84, + 0xd01: 84, + 0xd3b: 84, + 0xd3c: 84, + 0xd41: 84, + 0xd42: 84, + 0xd43: 84, + 0xd44: 84, + 0xd4d: 84, + 0xd62: 84, + 0xd63: 84, + 0xd81: 84, + 0xdca: 84, + 0xdd2: 84, + 0xdd3: 84, + 0xdd4: 84, + 0xdd6: 84, + 0xe31: 84, + 0xe34: 84, + 0xe35: 84, + 0xe36: 84, + 0xe37: 84, + 0xe38: 84, + 0xe39: 84, + 0xe3a: 84, + 0xe47: 84, + 0xe48: 84, + 0xe49: 84, + 0xe4a: 84, + 0xe4b: 84, + 0xe4c: 84, + 0xe4d: 84, + 0xe4e: 84, + 0xeb1: 84, + 0xeb4: 84, + 0xeb5: 84, + 0xeb6: 84, + 0xeb7: 84, + 0xeb8: 84, + 0xeb9: 84, + 0xeba: 84, + 0xebb: 84, + 0xebc: 84, + 0xec8: 84, + 0xec9: 84, + 0xeca: 84, + 0xecb: 84, + 0xecc: 84, + 0xecd: 84, + 0xece: 84, + 0xf18: 84, + 0xf19: 84, + 0xf35: 84, + 0xf37: 84, + 0xf39: 84, + 0xf71: 84, + 0xf72: 84, + 0xf73: 84, + 0xf74: 84, + 0xf75: 84, + 0xf76: 84, + 0xf77: 84, + 0xf78: 84, + 0xf79: 84, + 0xf7a: 84, + 0xf7b: 84, + 0xf7c: 84, + 0xf7d: 84, + 0xf7e: 84, + 0xf80: 84, + 0xf81: 84, + 0xf82: 84, + 0xf83: 84, + 0xf84: 84, + 0xf86: 84, + 0xf87: 84, + 0xf8d: 84, + 0xf8e: 84, + 
0xf8f: 84, + 0xf90: 84, + 0xf91: 84, + 0xf92: 84, + 0xf93: 84, + 0xf94: 84, + 0xf95: 84, + 0xf96: 84, + 0xf97: 84, + 0xf99: 84, + 0xf9a: 84, + 0xf9b: 84, + 0xf9c: 84, + 0xf9d: 84, + 0xf9e: 84, + 0xf9f: 84, + 0xfa0: 84, + 0xfa1: 84, + 0xfa2: 84, + 0xfa3: 84, + 0xfa4: 84, + 0xfa5: 84, + 0xfa6: 84, + 0xfa7: 84, + 0xfa8: 84, + 0xfa9: 84, + 0xfaa: 84, + 0xfab: 84, + 0xfac: 84, + 0xfad: 84, + 0xfae: 84, + 0xfaf: 84, + 0xfb0: 84, + 0xfb1: 84, + 0xfb2: 84, + 0xfb3: 84, + 0xfb4: 84, + 0xfb5: 84, + 0xfb6: 84, + 0xfb7: 84, + 0xfb8: 84, + 0xfb9: 84, + 0xfba: 84, + 0xfbb: 84, + 0xfbc: 84, + 0xfc6: 84, + 0x102d: 84, + 0x102e: 84, + 0x102f: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103a: 84, + 0x103d: 84, + 0x103e: 84, + 0x1058: 84, + 0x1059: 84, + 0x105e: 84, + 0x105f: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108d: 84, + 0x109d: 84, + 0x135d: 84, + 0x135e: 84, + 0x135f: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17b4: 84, + 0x17b5: 84, + 0x17b7: 84, + 0x17b8: 84, + 0x17b9: 84, + 0x17ba: 84, + 0x17bb: 84, + 0x17bc: 84, + 0x17bd: 84, + 0x17c6: 84, + 0x17c9: 84, + 0x17ca: 84, + 0x17cb: 84, + 0x17cc: 84, + 0x17cd: 84, + 0x17ce: 84, + 0x17cf: 84, + 0x17d0: 84, + 0x17d1: 84, + 0x17d2: 84, + 0x17d3: 84, + 0x17dd: 84, 0x1807: 68, 0x180a: 67, - 0x180e: 85, + 0x180b: 84, + 0x180c: 84, + 0x180d: 84, + 0x180f: 84, 0x1820: 68, 0x1821: 68, 0x1822: 68, @@ -582,11 +1215,6 @@ 0x1876: 68, 0x1877: 68, 0x1878: 68, - 0x1880: 85, - 0x1881: 85, - 0x1882: 85, - 0x1883: 85, - 0x1884: 85, 0x1885: 84, 0x1886: 84, 0x1887: 68, @@ -623,14 +1251,339 @@ 0x18a6: 68, 0x18a7: 68, 0x18a8: 68, + 0x18a9: 84, 0x18aa: 68, - 0x200c: 85, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193a: 84, + 0x193b: 84, + 0x1a17: 84, + 0x1a18: 84, + 0x1a1b: 84, + 0x1a56: 84, + 0x1a58: 84, + 0x1a59: 84, + 0x1a5a: 84, + 0x1a5b: 84, + 0x1a5c: 84, + 0x1a5d: 84, + 0x1a5e: 84, + 0x1a60: 84, + 0x1a62: 84, + 0x1a65: 84, + 0x1a66: 84, + 0x1a67: 84, + 0x1a68: 84, + 0x1a69: 84, + 0x1a6a: 84, + 0x1a6b: 84, + 0x1a6c: 84, + 0x1a73: 84, + 0x1a74: 84, + 0x1a75: 84, + 0x1a76: 84, + 0x1a77: 84, + 0x1a78: 84, + 0x1a79: 84, + 0x1a7a: 84, + 0x1a7b: 84, + 0x1a7c: 84, + 0x1a7f: 84, + 0x1ab0: 84, + 0x1ab1: 84, + 0x1ab2: 84, + 0x1ab3: 84, + 0x1ab4: 84, + 0x1ab5: 84, + 0x1ab6: 84, + 0x1ab7: 84, + 0x1ab8: 84, + 0x1ab9: 84, + 0x1aba: 84, + 0x1abb: 84, + 0x1abc: 84, + 0x1abd: 84, + 0x1abe: 84, + 0x1abf: 84, + 0x1ac0: 84, + 0x1ac1: 84, + 0x1ac2: 84, + 0x1ac3: 84, + 0x1ac4: 84, + 0x1ac5: 84, + 0x1ac6: 84, + 0x1ac7: 84, + 0x1ac8: 84, + 0x1ac9: 84, + 0x1aca: 84, + 0x1acb: 84, + 0x1acc: 84, + 0x1acd: 84, + 0x1ace: 84, + 0x1b00: 84, + 0x1b01: 84, + 0x1b02: 84, + 0x1b03: 84, + 0x1b34: 84, + 0x1b36: 84, + 0x1b37: 84, + 0x1b38: 84, + 0x1b39: 84, + 0x1b3a: 84, + 0x1b3c: 84, + 0x1b42: 84, + 0x1b6b: 84, + 0x1b6c: 84, + 0x1b6d: 84, + 0x1b6e: 84, + 0x1b6f: 84, + 0x1b70: 84, + 0x1b71: 84, + 0x1b72: 84, + 0x1b73: 84, + 0x1b80: 84, + 0x1b81: 84, + 0x1ba2: 84, + 0x1ba3: 84, + 0x1ba4: 84, + 0x1ba5: 84, + 0x1ba8: 84, + 0x1ba9: 84, + 0x1bab: 84, + 0x1bac: 84, + 0x1bad: 84, + 0x1be6: 84, + 0x1be8: 84, + 0x1be9: 84, + 0x1bed: 84, + 0x1bef: 84, + 0x1bf0: 84, + 0x1bf1: 84, + 0x1c2c: 84, + 0x1c2d: 84, + 0x1c2e: 84, + 0x1c2f: 84, + 0x1c30: 84, + 0x1c31: 84, + 0x1c32: 84, + 0x1c33: 84, + 0x1c36: 84, + 0x1c37: 84, + 
0x1cd0: 84, + 0x1cd1: 84, + 0x1cd2: 84, + 0x1cd4: 84, + 0x1cd5: 84, + 0x1cd6: 84, + 0x1cd7: 84, + 0x1cd8: 84, + 0x1cd9: 84, + 0x1cda: 84, + 0x1cdb: 84, + 0x1cdc: 84, + 0x1cdd: 84, + 0x1cde: 84, + 0x1cdf: 84, + 0x1ce0: 84, + 0x1ce2: 84, + 0x1ce3: 84, + 0x1ce4: 84, + 0x1ce5: 84, + 0x1ce6: 84, + 0x1ce7: 84, + 0x1ce8: 84, + 0x1ced: 84, + 0x1cf4: 84, + 0x1cf8: 84, + 0x1cf9: 84, + 0x1dc0: 84, + 0x1dc1: 84, + 0x1dc2: 84, + 0x1dc3: 84, + 0x1dc4: 84, + 0x1dc5: 84, + 0x1dc6: 84, + 0x1dc7: 84, + 0x1dc8: 84, + 0x1dc9: 84, + 0x1dca: 84, + 0x1dcb: 84, + 0x1dcc: 84, + 0x1dcd: 84, + 0x1dce: 84, + 0x1dcf: 84, + 0x1dd0: 84, + 0x1dd1: 84, + 0x1dd2: 84, + 0x1dd3: 84, + 0x1dd4: 84, + 0x1dd5: 84, + 0x1dd6: 84, + 0x1dd7: 84, + 0x1dd8: 84, + 0x1dd9: 84, + 0x1dda: 84, + 0x1ddb: 84, + 0x1ddc: 84, + 0x1ddd: 84, + 0x1dde: 84, + 0x1ddf: 84, + 0x1de0: 84, + 0x1de1: 84, + 0x1de2: 84, + 0x1de3: 84, + 0x1de4: 84, + 0x1de5: 84, + 0x1de6: 84, + 0x1de7: 84, + 0x1de8: 84, + 0x1de9: 84, + 0x1dea: 84, + 0x1deb: 84, + 0x1dec: 84, + 0x1ded: 84, + 0x1dee: 84, + 0x1def: 84, + 0x1df0: 84, + 0x1df1: 84, + 0x1df2: 84, + 0x1df3: 84, + 0x1df4: 84, + 0x1df5: 84, + 0x1df6: 84, + 0x1df7: 84, + 0x1df8: 84, + 0x1df9: 84, + 0x1dfa: 84, + 0x1dfb: 84, + 0x1dfc: 84, + 0x1dfd: 84, + 0x1dfe: 84, + 0x1dff: 84, + 0x200b: 84, 0x200d: 67, - 0x202f: 85, - 0x2066: 85, - 0x2067: 85, - 0x2068: 85, - 0x2069: 85, + 0x200e: 84, + 0x200f: 84, + 0x202a: 84, + 0x202b: 84, + 0x202c: 84, + 0x202d: 84, + 0x202e: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206a: 84, + 0x206b: 84, + 0x206c: 84, + 0x206d: 84, + 0x206e: 84, + 0x206f: 84, + 0x20d0: 84, + 0x20d1: 84, + 0x20d2: 84, + 0x20d3: 84, + 0x20d4: 84, + 0x20d5: 84, + 0x20d6: 84, + 0x20d7: 84, + 0x20d8: 84, + 0x20d9: 84, + 0x20da: 84, + 0x20db: 84, + 0x20dc: 84, + 0x20dd: 84, + 0x20de: 84, + 0x20df: 84, + 0x20e0: 84, + 0x20e1: 84, + 0x20e2: 84, + 0x20e3: 84, + 0x20e4: 84, + 0x20e5: 84, + 0x20e6: 84, + 0x20e7: 84, + 0x20e8: 84, + 0x20e9: 84, + 0x20ea: 84, + 0x20eb: 84, + 0x20ec: 84, + 0x20ed: 84, + 0x20ee: 84, + 0x20ef: 84, + 0x20f0: 84, + 0x2cef: 84, + 0x2cf0: 84, + 0x2cf1: 84, + 0x2d7f: 84, + 0x2de0: 84, + 0x2de1: 84, + 0x2de2: 84, + 0x2de3: 84, + 0x2de4: 84, + 0x2de5: 84, + 0x2de6: 84, + 0x2de7: 84, + 0x2de8: 84, + 0x2de9: 84, + 0x2dea: 84, + 0x2deb: 84, + 0x2dec: 84, + 0x2ded: 84, + 0x2dee: 84, + 0x2def: 84, + 0x2df0: 84, + 0x2df1: 84, + 0x2df2: 84, + 0x2df3: 84, + 0x2df4: 84, + 0x2df5: 84, + 0x2df6: 84, + 0x2df7: 84, + 0x2df8: 84, + 0x2df9: 84, + 0x2dfa: 84, + 0x2dfb: 84, + 0x2dfc: 84, + 0x2dfd: 84, + 0x2dfe: 84, + 0x2dff: 84, + 0x302a: 84, + 0x302b: 84, + 0x302c: 84, + 0x302d: 84, + 0x3099: 84, + 0x309a: 84, + 0xa66f: 84, + 0xa670: 84, + 0xa671: 84, + 0xa672: 84, + 0xa674: 84, + 0xa675: 84, + 0xa676: 84, + 0xa677: 84, + 0xa678: 84, + 0xa679: 84, + 0xa67a: 84, + 0xa67b: 84, + 0xa67c: 84, + 0xa67d: 84, + 0xa69e: 84, + 0xa69f: 84, + 0xa6f0: 84, + 0xa6f1: 84, + 0xa802: 84, + 0xa806: 84, + 0xa80b: 84, + 0xa825: 84, + 0xa826: 84, + 0xa82c: 84, 0xa840: 68, 0xa841: 68, 0xa842: 68, @@ -682,20 +1635,151 @@ 0xa870: 68, 0xa871: 68, 0xa872: 76, - 0xa873: 85, + 0xa8c4: 84, + 0xa8c5: 84, + 0xa8e0: 84, + 0xa8e1: 84, + 0xa8e2: 84, + 0xa8e3: 84, + 0xa8e4: 84, + 0xa8e5: 84, + 0xa8e6: 84, + 0xa8e7: 84, + 0xa8e8: 84, + 0xa8e9: 84, + 0xa8ea: 84, + 0xa8eb: 84, + 0xa8ec: 84, + 0xa8ed: 84, + 0xa8ee: 84, + 0xa8ef: 84, + 0xa8f0: 84, + 0xa8f1: 84, + 0xa8ff: 84, + 0xa926: 84, + 0xa927: 84, + 0xa928: 84, + 0xa929: 84, + 0xa92a: 84, + 0xa92b: 84, + 0xa92c: 84, + 0xa92d: 84, + 0xa947: 84, + 0xa948: 84, + 
0xa949: 84, + 0xa94a: 84, + 0xa94b: 84, + 0xa94c: 84, + 0xa94d: 84, + 0xa94e: 84, + 0xa94f: 84, + 0xa950: 84, + 0xa951: 84, + 0xa980: 84, + 0xa981: 84, + 0xa982: 84, + 0xa9b3: 84, + 0xa9b6: 84, + 0xa9b7: 84, + 0xa9b8: 84, + 0xa9b9: 84, + 0xa9bc: 84, + 0xa9bd: 84, + 0xa9e5: 84, + 0xaa29: 84, + 0xaa2a: 84, + 0xaa2b: 84, + 0xaa2c: 84, + 0xaa2d: 84, + 0xaa2e: 84, + 0xaa31: 84, + 0xaa32: 84, + 0xaa35: 84, + 0xaa36: 84, + 0xaa43: 84, + 0xaa4c: 84, + 0xaa7c: 84, + 0xaab0: 84, + 0xaab2: 84, + 0xaab3: 84, + 0xaab4: 84, + 0xaab7: 84, + 0xaab8: 84, + 0xaabe: 84, + 0xaabf: 84, + 0xaac1: 84, + 0xaaec: 84, + 0xaaed: 84, + 0xaaf6: 84, + 0xabe5: 84, + 0xabe8: 84, + 0xabed: 84, + 0xfb1e: 84, + 0xfe00: 84, + 0xfe01: 84, + 0xfe02: 84, + 0xfe03: 84, + 0xfe04: 84, + 0xfe05: 84, + 0xfe06: 84, + 0xfe07: 84, + 0xfe08: 84, + 0xfe09: 84, + 0xfe0a: 84, + 0xfe0b: 84, + 0xfe0c: 84, + 0xfe0d: 84, + 0xfe0e: 84, + 0xfe0f: 84, + 0xfe20: 84, + 0xfe21: 84, + 0xfe22: 84, + 0xfe23: 84, + 0xfe24: 84, + 0xfe25: 84, + 0xfe26: 84, + 0xfe27: 84, + 0xfe28: 84, + 0xfe29: 84, + 0xfe2a: 84, + 0xfe2b: 84, + 0xfe2c: 84, + 0xfe2d: 84, + 0xfe2e: 84, + 0xfe2f: 84, + 0xfeff: 84, + 0xfff9: 84, + 0xfffa: 84, + 0xfffb: 84, + 0x101fd: 84, + 0x102e0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037a: 84, + 0x10a01: 84, + 0x10a02: 84, + 0x10a03: 84, + 0x10a05: 84, + 0x10a06: 84, + 0x10a0c: 84, + 0x10a0d: 84, + 0x10a0e: 84, + 0x10a0f: 84, + 0x10a38: 84, + 0x10a39: 84, + 0x10a3a: 84, + 0x10a3f: 84, 0x10ac0: 68, 0x10ac1: 68, 0x10ac2: 68, 0x10ac3: 68, 0x10ac4: 68, 0x10ac5: 82, - 0x10ac6: 85, 0x10ac7: 82, - 0x10ac8: 85, 0x10ac9: 82, 0x10aca: 82, - 0x10acb: 85, - 0x10acc: 85, 0x10acd: 76, 0x10ace: 82, 0x10acf: 82, @@ -717,9 +1801,9 @@ 0x10adf: 68, 0x10ae0: 68, 0x10ae1: 82, - 0x10ae2: 85, - 0x10ae3: 85, 0x10ae4: 82, + 0x10ae5: 84, + 0x10ae6: 84, 0x10aeb: 68, 0x10aec: 68, 0x10aed: 68, @@ -749,7 +1833,6 @@ 0x10bac: 82, 0x10bad: 68, 0x10bae: 68, - 0x10baf: 85, 0x10d00: 76, 0x10d01: 68, 0x10d02: 68, @@ -786,6 +1869,15 @@ 0x10d21: 68, 0x10d22: 82, 0x10d23: 68, + 0x10d24: 84, + 0x10d25: 84, + 0x10d26: 84, + 0x10d27: 84, + 0x10eab: 84, + 0x10eac: 84, + 0x10efd: 84, + 0x10efe: 84, + 0x10eff: 84, 0x10f30: 68, 0x10f31: 68, 0x10f32: 68, @@ -807,7 +1899,17 @@ 0x10f42: 68, 0x10f43: 68, 0x10f44: 68, - 0x10f45: 85, + 0x10f46: 84, + 0x10f47: 84, + 0x10f48: 84, + 0x10f49: 84, + 0x10f4a: 84, + 0x10f4b: 84, + 0x10f4c: 84, + 0x10f4d: 84, + 0x10f4e: 84, + 0x10f4f: 84, + 0x10f50: 84, 0x10f51: 68, 0x10f52: 68, 0x10f53: 68, @@ -830,14 +1932,16 @@ 0x10f7f: 68, 0x10f80: 68, 0x10f81: 68, + 0x10f82: 84, + 0x10f83: 84, + 0x10f84: 84, + 0x10f85: 84, 0x10fb0: 68, - 0x10fb1: 85, 0x10fb2: 68, 0x10fb3: 68, 0x10fb4: 82, 0x10fb5: 82, 0x10fb6: 82, - 0x10fb7: 85, 0x10fb8: 68, 0x10fb9: 82, 0x10fba: 82, @@ -846,20 +1950,668 @@ 0x10fbd: 82, 0x10fbe: 68, 0x10fbf: 68, - 0x10fc0: 85, 0x10fc1: 68, 0x10fc2: 82, 0x10fc3: 82, 0x10fc4: 68, - 0x10fc5: 85, - 0x10fc6: 85, - 0x10fc7: 85, - 0x10fc8: 85, 0x10fc9: 82, 0x10fca: 68, 0x10fcb: 76, - 0x110bd: 85, - 0x110cd: 85, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103a: 84, + 0x1103b: 84, + 0x1103c: 84, + 0x1103d: 84, + 0x1103e: 84, + 0x1103f: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107f: 84, + 0x11080: 84, + 0x11081: 84, + 0x110b3: 84, + 0x110b4: 84, + 0x110b5: 84, + 0x110b6: 84, + 0x110b9: 84, + 0x110ba: 84, + 0x110c2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 
84, + 0x1112a: 84, + 0x1112b: 84, + 0x1112d: 84, + 0x1112e: 84, + 0x1112f: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111b6: 84, + 0x111b7: 84, + 0x111b8: 84, + 0x111b9: 84, + 0x111ba: 84, + 0x111bb: 84, + 0x111bc: 84, + 0x111bd: 84, + 0x111be: 84, + 0x111c9: 84, + 0x111ca: 84, + 0x111cb: 84, + 0x111cc: 84, + 0x111cf: 84, + 0x1122f: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123e: 84, + 0x11241: 84, + 0x112df: 84, + 0x112e3: 84, + 0x112e4: 84, + 0x112e5: 84, + 0x112e6: 84, + 0x112e7: 84, + 0x112e8: 84, + 0x112e9: 84, + 0x112ea: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133b: 84, + 0x1133c: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136a: 84, + 0x1136b: 84, + 0x1136c: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143a: 84, + 0x1143b: 84, + 0x1143c: 84, + 0x1143d: 84, + 0x1143e: 84, + 0x1143f: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145e: 84, + 0x114b3: 84, + 0x114b4: 84, + 0x114b5: 84, + 0x114b6: 84, + 0x114b7: 84, + 0x114b8: 84, + 0x114ba: 84, + 0x114bf: 84, + 0x114c0: 84, + 0x114c2: 84, + 0x114c3: 84, + 0x115b2: 84, + 0x115b3: 84, + 0x115b4: 84, + 0x115b5: 84, + 0x115bc: 84, + 0x115bd: 84, + 0x115bf: 84, + 0x115c0: 84, + 0x115dc: 84, + 0x115dd: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163a: 84, + 0x1163d: 84, + 0x1163f: 84, + 0x11640: 84, + 0x116ab: 84, + 0x116ad: 84, + 0x116b0: 84, + 0x116b1: 84, + 0x116b2: 84, + 0x116b3: 84, + 0x116b4: 84, + 0x116b5: 84, + 0x116b7: 84, + 0x1171d: 84, + 0x1171e: 84, + 0x1171f: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172a: 84, + 0x1172b: 84, + 0x1182f: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 0x11837: 84, + 0x11839: 84, + 0x1183a: 84, + 0x1193b: 84, + 0x1193c: 84, + 0x1193e: 84, + 0x11943: 84, + 0x119d4: 84, + 0x119d5: 84, + 0x119d6: 84, + 0x119d7: 84, + 0x119da: 84, + 0x119db: 84, + 0x119e0: 84, + 0x11a01: 84, + 0x11a02: 84, + 0x11a03: 84, + 0x11a04: 84, + 0x11a05: 84, + 0x11a06: 84, + 0x11a07: 84, + 0x11a08: 84, + 0x11a09: 84, + 0x11a0a: 84, + 0x11a33: 84, + 0x11a34: 84, + 0x11a35: 84, + 0x11a36: 84, + 0x11a37: 84, + 0x11a38: 84, + 0x11a3b: 84, + 0x11a3c: 84, + 0x11a3d: 84, + 0x11a3e: 84, + 0x11a47: 84, + 0x11a51: 84, + 0x11a52: 84, + 0x11a53: 84, + 0x11a54: 84, + 0x11a55: 84, + 0x11a56: 84, + 0x11a59: 84, + 0x11a5a: 84, + 0x11a5b: 84, + 0x11a8a: 84, + 0x11a8b: 84, + 0x11a8c: 84, + 0x11a8d: 84, + 0x11a8e: 84, + 0x11a8f: 84, + 0x11a90: 84, + 0x11a91: 84, + 0x11a92: 84, + 0x11a93: 84, + 0x11a94: 84, + 0x11a95: 84, + 0x11a96: 84, + 0x11a98: 84, + 0x11a99: 84, + 0x11c30: 84, + 0x11c31: 84, + 0x11c32: 84, + 0x11c33: 84, + 0x11c34: 84, + 0x11c35: 84, + 0x11c36: 84, + 0x11c38: 84, + 0x11c39: 84, + 0x11c3a: 84, + 0x11c3b: 84, + 0x11c3c: 84, + 0x11c3d: 84, + 0x11c3f: 84, + 0x11c92: 84, + 0x11c93: 84, + 0x11c94: 84, + 0x11c95: 84, + 0x11c96: 84, + 0x11c97: 84, + 0x11c98: 84, + 0x11c99: 84, + 0x11c9a: 84, + 0x11c9b: 84, + 0x11c9c: 84, + 0x11c9d: 84, + 0x11c9e: 84, + 0x11c9f: 84, + 0x11ca0: 84, + 0x11ca1: 84, + 0x11ca2: 84, + 0x11ca3: 84, + 0x11ca4: 84, + 0x11ca5: 84, + 0x11ca6: 84, + 0x11ca7: 84, + 0x11caa: 84, + 0x11cab: 84, + 0x11cac: 84, + 0x11cad: 84, + 0x11cae: 84, + 0x11caf: 84, + 0x11cb0: 
84, + 0x11cb2: 84, + 0x11cb3: 84, + 0x11cb5: 84, + 0x11cb6: 84, + 0x11d31: 84, + 0x11d32: 84, + 0x11d33: 84, + 0x11d34: 84, + 0x11d35: 84, + 0x11d36: 84, + 0x11d3a: 84, + 0x11d3c: 84, + 0x11d3d: 84, + 0x11d3f: 84, + 0x11d40: 84, + 0x11d41: 84, + 0x11d42: 84, + 0x11d43: 84, + 0x11d44: 84, + 0x11d45: 84, + 0x11d47: 84, + 0x11d90: 84, + 0x11d91: 84, + 0x11d95: 84, + 0x11d97: 84, + 0x11ef3: 84, + 0x11ef4: 84, + 0x11f00: 84, + 0x11f01: 84, + 0x11f36: 84, + 0x11f37: 84, + 0x11f38: 84, + 0x11f39: 84, + 0x11f3a: 84, + 0x11f40: 84, + 0x11f42: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343a: 84, + 0x1343b: 84, + 0x1343c: 84, + 0x1343d: 84, + 0x1343e: 84, + 0x1343f: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344a: 84, + 0x1344b: 84, + 0x1344c: 84, + 0x1344d: 84, + 0x1344e: 84, + 0x1344f: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x16af0: 84, + 0x16af1: 84, + 0x16af2: 84, + 0x16af3: 84, + 0x16af4: 84, + 0x16b30: 84, + 0x16b31: 84, + 0x16b32: 84, + 0x16b33: 84, + 0x16b34: 84, + 0x16b35: 84, + 0x16b36: 84, + 0x16f4f: 84, + 0x16f8f: 84, + 0x16f90: 84, + 0x16f91: 84, + 0x16f92: 84, + 0x16fe4: 84, + 0x1bc9d: 84, + 0x1bc9e: 84, + 0x1bca0: 84, + 0x1bca1: 84, + 0x1bca2: 84, + 0x1bca3: 84, + 0x1cf00: 84, + 0x1cf01: 84, + 0x1cf02: 84, + 0x1cf03: 84, + 0x1cf04: 84, + 0x1cf05: 84, + 0x1cf06: 84, + 0x1cf07: 84, + 0x1cf08: 84, + 0x1cf09: 84, + 0x1cf0a: 84, + 0x1cf0b: 84, + 0x1cf0c: 84, + 0x1cf0d: 84, + 0x1cf0e: 84, + 0x1cf0f: 84, + 0x1cf10: 84, + 0x1cf11: 84, + 0x1cf12: 84, + 0x1cf13: 84, + 0x1cf14: 84, + 0x1cf15: 84, + 0x1cf16: 84, + 0x1cf17: 84, + 0x1cf18: 84, + 0x1cf19: 84, + 0x1cf1a: 84, + 0x1cf1b: 84, + 0x1cf1c: 84, + 0x1cf1d: 84, + 0x1cf1e: 84, + 0x1cf1f: 84, + 0x1cf20: 84, + 0x1cf21: 84, + 0x1cf22: 84, + 0x1cf23: 84, + 0x1cf24: 84, + 0x1cf25: 84, + 0x1cf26: 84, + 0x1cf27: 84, + 0x1cf28: 84, + 0x1cf29: 84, + 0x1cf2a: 84, + 0x1cf2b: 84, + 0x1cf2c: 84, + 0x1cf2d: 84, + 0x1cf30: 84, + 0x1cf31: 84, + 0x1cf32: 84, + 0x1cf33: 84, + 0x1cf34: 84, + 0x1cf35: 84, + 0x1cf36: 84, + 0x1cf37: 84, + 0x1cf38: 84, + 0x1cf39: 84, + 0x1cf3a: 84, + 0x1cf3b: 84, + 0x1cf3c: 84, + 0x1cf3d: 84, + 0x1cf3e: 84, + 0x1cf3f: 84, + 0x1cf40: 84, + 0x1cf41: 84, + 0x1cf42: 84, + 0x1cf43: 84, + 0x1cf44: 84, + 0x1cf45: 84, + 0x1cf46: 84, + 0x1d167: 84, + 0x1d168: 84, + 0x1d169: 84, + 0x1d173: 84, + 0x1d174: 84, + 0x1d175: 84, + 0x1d176: 84, + 0x1d177: 84, + 0x1d178: 84, + 0x1d179: 84, + 0x1d17a: 84, + 0x1d17b: 84, + 0x1d17c: 84, + 0x1d17d: 84, + 0x1d17e: 84, + 0x1d17f: 84, + 0x1d180: 84, + 0x1d181: 84, + 0x1d182: 84, + 0x1d185: 84, + 0x1d186: 84, + 0x1d187: 84, + 0x1d188: 84, + 0x1d189: 84, + 0x1d18a: 84, + 0x1d18b: 84, + 0x1d1aa: 84, + 0x1d1ab: 84, + 0x1d1ac: 84, + 0x1d1ad: 84, + 0x1d242: 84, + 0x1d243: 84, + 0x1d244: 84, + 0x1da00: 84, + 0x1da01: 84, + 0x1da02: 84, + 0x1da03: 84, + 0x1da04: 84, + 0x1da05: 84, + 0x1da06: 84, + 0x1da07: 84, + 0x1da08: 84, + 0x1da09: 84, + 0x1da0a: 84, + 0x1da0b: 84, + 0x1da0c: 84, + 0x1da0d: 84, + 0x1da0e: 84, + 0x1da0f: 84, + 0x1da10: 84, + 0x1da11: 84, + 0x1da12: 84, + 0x1da13: 84, + 0x1da14: 84, + 0x1da15: 84, + 0x1da16: 84, + 0x1da17: 84, + 0x1da18: 84, + 0x1da19: 84, + 0x1da1a: 84, + 0x1da1b: 84, + 0x1da1c: 84, + 0x1da1d: 84, + 0x1da1e: 84, + 0x1da1f: 84, + 0x1da20: 84, + 0x1da21: 84, + 0x1da22: 84, + 0x1da23: 84, + 0x1da24: 84, + 0x1da25: 84, + 0x1da26: 84, + 0x1da27: 84, + 0x1da28: 84, + 0x1da29: 84, + 0x1da2a: 
84, + 0x1da2b: 84, + 0x1da2c: 84, + 0x1da2d: 84, + 0x1da2e: 84, + 0x1da2f: 84, + 0x1da30: 84, + 0x1da31: 84, + 0x1da32: 84, + 0x1da33: 84, + 0x1da34: 84, + 0x1da35: 84, + 0x1da36: 84, + 0x1da3b: 84, + 0x1da3c: 84, + 0x1da3d: 84, + 0x1da3e: 84, + 0x1da3f: 84, + 0x1da40: 84, + 0x1da41: 84, + 0x1da42: 84, + 0x1da43: 84, + 0x1da44: 84, + 0x1da45: 84, + 0x1da46: 84, + 0x1da47: 84, + 0x1da48: 84, + 0x1da49: 84, + 0x1da4a: 84, + 0x1da4b: 84, + 0x1da4c: 84, + 0x1da4d: 84, + 0x1da4e: 84, + 0x1da4f: 84, + 0x1da50: 84, + 0x1da51: 84, + 0x1da52: 84, + 0x1da53: 84, + 0x1da54: 84, + 0x1da55: 84, + 0x1da56: 84, + 0x1da57: 84, + 0x1da58: 84, + 0x1da59: 84, + 0x1da5a: 84, + 0x1da5b: 84, + 0x1da5c: 84, + 0x1da5d: 84, + 0x1da5e: 84, + 0x1da5f: 84, + 0x1da60: 84, + 0x1da61: 84, + 0x1da62: 84, + 0x1da63: 84, + 0x1da64: 84, + 0x1da65: 84, + 0x1da66: 84, + 0x1da67: 84, + 0x1da68: 84, + 0x1da69: 84, + 0x1da6a: 84, + 0x1da6b: 84, + 0x1da6c: 84, + 0x1da75: 84, + 0x1da84: 84, + 0x1da9b: 84, + 0x1da9c: 84, + 0x1da9d: 84, + 0x1da9e: 84, + 0x1da9f: 84, + 0x1daa1: 84, + 0x1daa2: 84, + 0x1daa3: 84, + 0x1daa4: 84, + 0x1daa5: 84, + 0x1daa6: 84, + 0x1daa7: 84, + 0x1daa8: 84, + 0x1daa9: 84, + 0x1daaa: 84, + 0x1daab: 84, + 0x1daac: 84, + 0x1daad: 84, + 0x1daae: 84, + 0x1daaf: 84, + 0x1e000: 84, + 0x1e001: 84, + 0x1e002: 84, + 0x1e003: 84, + 0x1e004: 84, + 0x1e005: 84, + 0x1e006: 84, + 0x1e008: 84, + 0x1e009: 84, + 0x1e00a: 84, + 0x1e00b: 84, + 0x1e00c: 84, + 0x1e00d: 84, + 0x1e00e: 84, + 0x1e00f: 84, + 0x1e010: 84, + 0x1e011: 84, + 0x1e012: 84, + 0x1e013: 84, + 0x1e014: 84, + 0x1e015: 84, + 0x1e016: 84, + 0x1e017: 84, + 0x1e018: 84, + 0x1e01b: 84, + 0x1e01c: 84, + 0x1e01d: 84, + 0x1e01e: 84, + 0x1e01f: 84, + 0x1e020: 84, + 0x1e021: 84, + 0x1e023: 84, + 0x1e024: 84, + 0x1e026: 84, + 0x1e027: 84, + 0x1e028: 84, + 0x1e029: 84, + 0x1e02a: 84, + 0x1e08f: 84, + 0x1e130: 84, + 0x1e131: 84, + 0x1e132: 84, + 0x1e133: 84, + 0x1e134: 84, + 0x1e135: 84, + 0x1e136: 84, + 0x1e2ae: 84, + 0x1e2ec: 84, + 0x1e2ed: 84, + 0x1e2ee: 84, + 0x1e2ef: 84, + 0x1e4ec: 84, + 0x1e4ed: 84, + 0x1e4ee: 84, + 0x1e4ef: 84, + 0x1e8d0: 84, + 0x1e8d1: 84, + 0x1e8d2: 84, + 0x1e8d3: 84, + 0x1e8d4: 84, + 0x1e8d5: 84, + 0x1e8d6: 84, 0x1e900: 68, 0x1e901: 68, 0x1e902: 68, @@ -928,7 +2680,351 @@ 0x1e941: 68, 0x1e942: 68, 0x1e943: 68, + 0x1e944: 84, + 0x1e945: 84, + 0x1e946: 84, + 0x1e947: 84, + 0x1e948: 84, + 0x1e949: 84, + 0x1e94a: 84, 0x1e94b: 84, + 0xe0001: 84, + 0xe0020: 84, + 0xe0021: 84, + 0xe0022: 84, + 0xe0023: 84, + 0xe0024: 84, + 0xe0025: 84, + 0xe0026: 84, + 0xe0027: 84, + 0xe0028: 84, + 0xe0029: 84, + 0xe002a: 84, + 0xe002b: 84, + 0xe002c: 84, + 0xe002d: 84, + 0xe002e: 84, + 0xe002f: 84, + 0xe0030: 84, + 0xe0031: 84, + 0xe0032: 84, + 0xe0033: 84, + 0xe0034: 84, + 0xe0035: 84, + 0xe0036: 84, + 0xe0037: 84, + 0xe0038: 84, + 0xe0039: 84, + 0xe003a: 84, + 0xe003b: 84, + 0xe003c: 84, + 0xe003d: 84, + 0xe003e: 84, + 0xe003f: 84, + 0xe0040: 84, + 0xe0041: 84, + 0xe0042: 84, + 0xe0043: 84, + 0xe0044: 84, + 0xe0045: 84, + 0xe0046: 84, + 0xe0047: 84, + 0xe0048: 84, + 0xe0049: 84, + 0xe004a: 84, + 0xe004b: 84, + 0xe004c: 84, + 0xe004d: 84, + 0xe004e: 84, + 0xe004f: 84, + 0xe0050: 84, + 0xe0051: 84, + 0xe0052: 84, + 0xe0053: 84, + 0xe0054: 84, + 0xe0055: 84, + 0xe0056: 84, + 0xe0057: 84, + 0xe0058: 84, + 0xe0059: 84, + 0xe005a: 84, + 0xe005b: 84, + 0xe005c: 84, + 0xe005d: 84, + 0xe005e: 84, + 0xe005f: 84, + 0xe0060: 84, + 0xe0061: 84, + 0xe0062: 84, + 0xe0063: 84, + 0xe0064: 84, + 0xe0065: 84, + 0xe0066: 84, + 0xe0067: 84, + 0xe0068: 84, + 0xe0069: 84, + 0xe006a: 84, + 
0xe006b: 84, + 0xe006c: 84, + 0xe006d: 84, + 0xe006e: 84, + 0xe006f: 84, + 0xe0070: 84, + 0xe0071: 84, + 0xe0072: 84, + 0xe0073: 84, + 0xe0074: 84, + 0xe0075: 84, + 0xe0076: 84, + 0xe0077: 84, + 0xe0078: 84, + 0xe0079: 84, + 0xe007a: 84, + 0xe007b: 84, + 0xe007c: 84, + 0xe007d: 84, + 0xe007e: 84, + 0xe007f: 84, + 0xe0100: 84, + 0xe0101: 84, + 0xe0102: 84, + 0xe0103: 84, + 0xe0104: 84, + 0xe0105: 84, + 0xe0106: 84, + 0xe0107: 84, + 0xe0108: 84, + 0xe0109: 84, + 0xe010a: 84, + 0xe010b: 84, + 0xe010c: 84, + 0xe010d: 84, + 0xe010e: 84, + 0xe010f: 84, + 0xe0110: 84, + 0xe0111: 84, + 0xe0112: 84, + 0xe0113: 84, + 0xe0114: 84, + 0xe0115: 84, + 0xe0116: 84, + 0xe0117: 84, + 0xe0118: 84, + 0xe0119: 84, + 0xe011a: 84, + 0xe011b: 84, + 0xe011c: 84, + 0xe011d: 84, + 0xe011e: 84, + 0xe011f: 84, + 0xe0120: 84, + 0xe0121: 84, + 0xe0122: 84, + 0xe0123: 84, + 0xe0124: 84, + 0xe0125: 84, + 0xe0126: 84, + 0xe0127: 84, + 0xe0128: 84, + 0xe0129: 84, + 0xe012a: 84, + 0xe012b: 84, + 0xe012c: 84, + 0xe012d: 84, + 0xe012e: 84, + 0xe012f: 84, + 0xe0130: 84, + 0xe0131: 84, + 0xe0132: 84, + 0xe0133: 84, + 0xe0134: 84, + 0xe0135: 84, + 0xe0136: 84, + 0xe0137: 84, + 0xe0138: 84, + 0xe0139: 84, + 0xe013a: 84, + 0xe013b: 84, + 0xe013c: 84, + 0xe013d: 84, + 0xe013e: 84, + 0xe013f: 84, + 0xe0140: 84, + 0xe0141: 84, + 0xe0142: 84, + 0xe0143: 84, + 0xe0144: 84, + 0xe0145: 84, + 0xe0146: 84, + 0xe0147: 84, + 0xe0148: 84, + 0xe0149: 84, + 0xe014a: 84, + 0xe014b: 84, + 0xe014c: 84, + 0xe014d: 84, + 0xe014e: 84, + 0xe014f: 84, + 0xe0150: 84, + 0xe0151: 84, + 0xe0152: 84, + 0xe0153: 84, + 0xe0154: 84, + 0xe0155: 84, + 0xe0156: 84, + 0xe0157: 84, + 0xe0158: 84, + 0xe0159: 84, + 0xe015a: 84, + 0xe015b: 84, + 0xe015c: 84, + 0xe015d: 84, + 0xe015e: 84, + 0xe015f: 84, + 0xe0160: 84, + 0xe0161: 84, + 0xe0162: 84, + 0xe0163: 84, + 0xe0164: 84, + 0xe0165: 84, + 0xe0166: 84, + 0xe0167: 84, + 0xe0168: 84, + 0xe0169: 84, + 0xe016a: 84, + 0xe016b: 84, + 0xe016c: 84, + 0xe016d: 84, + 0xe016e: 84, + 0xe016f: 84, + 0xe0170: 84, + 0xe0171: 84, + 0xe0172: 84, + 0xe0173: 84, + 0xe0174: 84, + 0xe0175: 84, + 0xe0176: 84, + 0xe0177: 84, + 0xe0178: 84, + 0xe0179: 84, + 0xe017a: 84, + 0xe017b: 84, + 0xe017c: 84, + 0xe017d: 84, + 0xe017e: 84, + 0xe017f: 84, + 0xe0180: 84, + 0xe0181: 84, + 0xe0182: 84, + 0xe0183: 84, + 0xe0184: 84, + 0xe0185: 84, + 0xe0186: 84, + 0xe0187: 84, + 0xe0188: 84, + 0xe0189: 84, + 0xe018a: 84, + 0xe018b: 84, + 0xe018c: 84, + 0xe018d: 84, + 0xe018e: 84, + 0xe018f: 84, + 0xe0190: 84, + 0xe0191: 84, + 0xe0192: 84, + 0xe0193: 84, + 0xe0194: 84, + 0xe0195: 84, + 0xe0196: 84, + 0xe0197: 84, + 0xe0198: 84, + 0xe0199: 84, + 0xe019a: 84, + 0xe019b: 84, + 0xe019c: 84, + 0xe019d: 84, + 0xe019e: 84, + 0xe019f: 84, + 0xe01a0: 84, + 0xe01a1: 84, + 0xe01a2: 84, + 0xe01a3: 84, + 0xe01a4: 84, + 0xe01a5: 84, + 0xe01a6: 84, + 0xe01a7: 84, + 0xe01a8: 84, + 0xe01a9: 84, + 0xe01aa: 84, + 0xe01ab: 84, + 0xe01ac: 84, + 0xe01ad: 84, + 0xe01ae: 84, + 0xe01af: 84, + 0xe01b0: 84, + 0xe01b1: 84, + 0xe01b2: 84, + 0xe01b3: 84, + 0xe01b4: 84, + 0xe01b5: 84, + 0xe01b6: 84, + 0xe01b7: 84, + 0xe01b8: 84, + 0xe01b9: 84, + 0xe01ba: 84, + 0xe01bb: 84, + 0xe01bc: 84, + 0xe01bd: 84, + 0xe01be: 84, + 0xe01bf: 84, + 0xe01c0: 84, + 0xe01c1: 84, + 0xe01c2: 84, + 0xe01c3: 84, + 0xe01c4: 84, + 0xe01c5: 84, + 0xe01c6: 84, + 0xe01c7: 84, + 0xe01c8: 84, + 0xe01c9: 84, + 0xe01ca: 84, + 0xe01cb: 84, + 0xe01cc: 84, + 0xe01cd: 84, + 0xe01ce: 84, + 0xe01cf: 84, + 0xe01d0: 84, + 0xe01d1: 84, + 0xe01d2: 84, + 0xe01d3: 84, + 0xe01d4: 84, + 0xe01d5: 84, + 0xe01d6: 84, + 0xe01d7: 84, + 
0xe01d8: 84, + 0xe01d9: 84, + 0xe01da: 84, + 0xe01db: 84, + 0xe01dc: 84, + 0xe01dd: 84, + 0xe01de: 84, + 0xe01df: 84, + 0xe01e0: 84, + 0xe01e1: 84, + 0xe01e2: 84, + 0xe01e3: 84, + 0xe01e4: 84, + 0xe01e5: 84, + 0xe01e6: 84, + 0xe01e7: 84, + 0xe01e8: 84, + 0xe01e9: 84, + 0xe01ea: 84, + 0xe01eb: 84, + 0xe01ec: 84, + 0xe01ed: 84, + 0xe01ee: 84, + 0xe01ef: 84, } codepoint_classes = { 'PVALID': ( @@ -2110,7 +4206,6 @@ 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, - 0x1e0300001e06e, 0x1e08f0001e090, 0x1e1000001e12d, 0x1e1300001e13e, diff --git a/src/pip/_vendor/idna/package_data.py b/src/pip/_vendor/idna/package_data.py index c5b7220c970..ed811133633 100644 --- a/src/pip/_vendor/idna/package_data.py +++ b/src/pip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '3.6' +__version__ = '3.7' diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index ab207435d0d..15406823f3e 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -10,7 +10,7 @@ pyproject-hooks==1.0.0 requests==2.31.0 certifi==2024.2.2 chardet==5.2.0 - idna==3.6 + idna==3.7 urllib3==1.26.18 rich==13.7.0 pygments==2.17.2 From 26ab64a3534bf2f575ccee2af61bacccc3d8c3ee Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 21:04:52 +0100 Subject: [PATCH 5/7] Upgrade rich to 13.7.1 --- news/rich.vendor.rst | 2 +- src/pip/_vendor/rich/_cell_widths.py | 367 ++++++++++++++------------- src/pip/_vendor/rich/text.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 188 insertions(+), 185 deletions(-) diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst index 586a4617228..b3c8f257742 100644 --- a/news/rich.vendor.rst +++ b/news/rich.vendor.rst @@ -1 +1 @@ -Upgrade rich to 13.7.0 +Upgrade rich to 13.7.1 diff --git a/src/pip/_vendor/rich/_cell_widths.py b/src/pip/_vendor/rich/_cell_widths.py index 36286df379e..608ae3a75d1 100644 --- a/src/pip/_vendor/rich/_cell_widths.py +++ b/src/pip/_vendor/rich/_cell_widths.py @@ -4,6 +4,7 @@ (0, 0, 0), (1, 31, -1), (127, 159, -1), + (173, 173, 0), (768, 879, 0), (1155, 1161, 0), (1425, 1469, 0), @@ -11,13 +12,16 @@ (1473, 1474, 0), (1476, 1477, 0), (1479, 1479, 0), + (1536, 1541, 0), (1552, 1562, 0), + (1564, 1564, 0), (1611, 1631, 0), (1648, 1648, 0), - (1750, 1756, 0), + (1750, 1757, 0), (1759, 1764, 0), (1767, 1768, 0), (1770, 1773, 0), + (1807, 1807, 0), (1809, 1809, 0), (1840, 1866, 0), (1958, 1968, 0), @@ -28,149 +32,137 @@ (2085, 2087, 0), (2089, 2093, 0), (2137, 2139, 0), - (2259, 2273, 0), - (2275, 2306, 0), - (2362, 2362, 0), - (2364, 2364, 0), - (2369, 2376, 0), - (2381, 2381, 0), + (2192, 2193, 0), + (2200, 2207, 0), + (2250, 2307, 0), + (2362, 2364, 0), + (2366, 2383, 0), (2385, 2391, 0), (2402, 2403, 0), - (2433, 2433, 0), + (2433, 2435, 0), (2492, 2492, 0), - (2497, 2500, 0), - (2509, 2509, 0), + (2494, 2500, 0), + (2503, 2504, 0), + (2507, 2509, 0), + (2519, 2519, 0), (2530, 2531, 0), (2558, 2558, 0), - (2561, 2562, 0), + (2561, 2563, 0), (2620, 2620, 0), - (2625, 2626, 0), + (2622, 2626, 0), (2631, 2632, 0), (2635, 2637, 0), (2641, 2641, 0), (2672, 2673, 0), (2677, 2677, 0), - (2689, 2690, 0), + (2689, 2691, 0), (2748, 2748, 0), - (2753, 2757, 0), - (2759, 2760, 0), - (2765, 2765, 0), + (2750, 2757, 0), + (2759, 2761, 0), + (2763, 2765, 0), (2786, 2787, 0), (2810, 2815, 0), - (2817, 2817, 0), + (2817, 2819, 0), (2876, 2876, 0), - (2879, 2879, 0), - (2881, 2884, 0), - (2893, 2893, 0), - (2901, 2902, 0), + (2878, 2884, 0), + (2887, 2888, 0), + (2891, 2893, 0), + (2901, 2903, 0), (2914, 2915, 0), (2946, 2946, 0), - (3008, 
3008, 0), - (3021, 3021, 0), - (3072, 3072, 0), - (3076, 3076, 0), - (3134, 3136, 0), + (3006, 3010, 0), + (3014, 3016, 0), + (3018, 3021, 0), + (3031, 3031, 0), + (3072, 3076, 0), + (3132, 3132, 0), + (3134, 3140, 0), (3142, 3144, 0), (3146, 3149, 0), (3157, 3158, 0), (3170, 3171, 0), - (3201, 3201, 0), + (3201, 3203, 0), (3260, 3260, 0), - (3263, 3263, 0), - (3270, 3270, 0), - (3276, 3277, 0), + (3262, 3268, 0), + (3270, 3272, 0), + (3274, 3277, 0), + (3285, 3286, 0), (3298, 3299, 0), - (3328, 3329, 0), + (3315, 3315, 0), + (3328, 3331, 0), (3387, 3388, 0), - (3393, 3396, 0), - (3405, 3405, 0), + (3390, 3396, 0), + (3398, 3400, 0), + (3402, 3405, 0), + (3415, 3415, 0), (3426, 3427, 0), - (3457, 3457, 0), + (3457, 3459, 0), (3530, 3530, 0), - (3538, 3540, 0), + (3535, 3540, 0), (3542, 3542, 0), + (3544, 3551, 0), + (3570, 3571, 0), (3633, 3633, 0), (3636, 3642, 0), (3655, 3662, 0), (3761, 3761, 0), (3764, 3772, 0), - (3784, 3789, 0), + (3784, 3790, 0), (3864, 3865, 0), (3893, 3893, 0), (3895, 3895, 0), (3897, 3897, 0), - (3953, 3966, 0), - (3968, 3972, 0), + (3902, 3903, 0), + (3953, 3972, 0), (3974, 3975, 0), (3981, 3991, 0), (3993, 4028, 0), (4038, 4038, 0), - (4141, 4144, 0), - (4146, 4151, 0), - (4153, 4154, 0), - (4157, 4158, 0), - (4184, 4185, 0), + (4139, 4158, 0), + (4182, 4185, 0), (4190, 4192, 0), + (4194, 4196, 0), + (4199, 4205, 0), (4209, 4212, 0), - (4226, 4226, 0), - (4229, 4230, 0), - (4237, 4237, 0), - (4253, 4253, 0), + (4226, 4237, 0), + (4239, 4239, 0), + (4250, 4253, 0), (4352, 4447, 2), + (4448, 4607, 0), (4957, 4959, 0), - (5906, 5908, 0), + (5906, 5909, 0), (5938, 5940, 0), (5970, 5971, 0), (6002, 6003, 0), - (6068, 6069, 0), - (6071, 6077, 0), - (6086, 6086, 0), - (6089, 6099, 0), + (6068, 6099, 0), (6109, 6109, 0), - (6155, 6157, 0), + (6155, 6159, 0), (6277, 6278, 0), (6313, 6313, 0), - (6432, 6434, 0), - (6439, 6440, 0), - (6450, 6450, 0), - (6457, 6459, 0), - (6679, 6680, 0), - (6683, 6683, 0), - (6742, 6742, 0), - (6744, 6750, 0), - (6752, 6752, 0), - (6754, 6754, 0), - (6757, 6764, 0), - (6771, 6780, 0), + (6432, 6443, 0), + (6448, 6459, 0), + (6679, 6683, 0), + (6741, 6750, 0), + (6752, 6780, 0), (6783, 6783, 0), - (6832, 6848, 0), - (6912, 6915, 0), - (6964, 6964, 0), - (6966, 6970, 0), - (6972, 6972, 0), - (6978, 6978, 0), + (6832, 6862, 0), + (6912, 6916, 0), + (6964, 6980, 0), (7019, 7027, 0), - (7040, 7041, 0), - (7074, 7077, 0), - (7080, 7081, 0), - (7083, 7085, 0), - (7142, 7142, 0), - (7144, 7145, 0), - (7149, 7149, 0), - (7151, 7153, 0), - (7212, 7219, 0), - (7222, 7223, 0), + (7040, 7042, 0), + (7073, 7085, 0), + (7142, 7155, 0), + (7204, 7223, 0), (7376, 7378, 0), - (7380, 7392, 0), - (7394, 7400, 0), + (7380, 7400, 0), (7405, 7405, 0), (7412, 7412, 0), - (7416, 7417, 0), - (7616, 7673, 0), - (7675, 7679, 0), + (7415, 7417, 0), + (7616, 7679, 0), (8203, 8207, 0), (8232, 8238, 0), - (8288, 8291, 0), + (8288, 8292, 0), + (8294, 8303, 0), (8400, 8432, 0), (8986, 8987, 2), (9001, 9002, 2), @@ -212,17 +204,16 @@ (11904, 11929, 2), (11931, 12019, 2), (12032, 12245, 2), - (12272, 12283, 2), - (12288, 12329, 2), - (12330, 12333, 0), - (12334, 12350, 2), + (12272, 12329, 2), + (12330, 12335, 0), + (12336, 12350, 2), (12353, 12438, 2), (12441, 12442, 0), (12443, 12543, 2), (12549, 12591, 2), (12593, 12686, 2), (12688, 12771, 2), - (12784, 12830, 2), + (12783, 12830, 2), (12832, 12871, 2), (12880, 19903, 2), (19968, 42124, 2), @@ -234,36 +225,33 @@ (43010, 43010, 0), (43014, 43014, 0), (43019, 43019, 0), - (43045, 43046, 0), + (43043, 43047, 0), (43052, 
43052, 0), - (43204, 43205, 0), + (43136, 43137, 0), + (43188, 43205, 0), (43232, 43249, 0), (43263, 43263, 0), (43302, 43309, 0), - (43335, 43345, 0), + (43335, 43347, 0), (43360, 43388, 2), - (43392, 43394, 0), - (43443, 43443, 0), - (43446, 43449, 0), - (43452, 43453, 0), + (43392, 43395, 0), + (43443, 43456, 0), (43493, 43493, 0), - (43561, 43566, 0), - (43569, 43570, 0), - (43573, 43574, 0), + (43561, 43574, 0), (43587, 43587, 0), - (43596, 43596, 0), - (43644, 43644, 0), + (43596, 43597, 0), + (43643, 43645, 0), (43696, 43696, 0), (43698, 43700, 0), (43703, 43704, 0), (43710, 43711, 0), (43713, 43713, 0), - (43756, 43757, 0), - (43766, 43766, 0), - (44005, 44005, 0), - (44008, 44008, 0), - (44013, 44013, 0), + (43755, 43759, 0), + (43765, 43766, 0), + (44003, 44010, 0), + (44012, 44013, 0), (44032, 55203, 2), + (55216, 55295, 0), (63744, 64255, 2), (64286, 64286, 0), (65024, 65039, 0), @@ -272,8 +260,10 @@ (65072, 65106, 2), (65108, 65126, 2), (65128, 65131, 2), + (65279, 65279, 0), (65281, 65376, 2), (65504, 65510, 2), + (65529, 65531, 0), (66045, 66045, 0), (66272, 66272, 0), (66422, 66426, 0), @@ -285,102 +275,108 @@ (68325, 68326, 0), (68900, 68903, 0), (69291, 69292, 0), + (69373, 69375, 0), (69446, 69456, 0), - (69633, 69633, 0), + (69506, 69509, 0), + (69632, 69634, 0), (69688, 69702, 0), - (69759, 69761, 0), - (69811, 69814, 0), - (69817, 69818, 0), + (69744, 69744, 0), + (69747, 69748, 0), + (69759, 69762, 0), + (69808, 69818, 0), + (69821, 69821, 0), + (69826, 69826, 0), + (69837, 69837, 0), (69888, 69890, 0), - (69927, 69931, 0), - (69933, 69940, 0), + (69927, 69940, 0), + (69957, 69958, 0), (70003, 70003, 0), - (70016, 70017, 0), - (70070, 70078, 0), + (70016, 70018, 0), + (70067, 70080, 0), (70089, 70092, 0), - (70095, 70095, 0), - (70191, 70193, 0), - (70196, 70196, 0), - (70198, 70199, 0), + (70094, 70095, 0), + (70188, 70199, 0), (70206, 70206, 0), - (70367, 70367, 0), - (70371, 70378, 0), - (70400, 70401, 0), + (70209, 70209, 0), + (70367, 70378, 0), + (70400, 70403, 0), (70459, 70460, 0), - (70464, 70464, 0), + (70462, 70468, 0), + (70471, 70472, 0), + (70475, 70477, 0), + (70487, 70487, 0), + (70498, 70499, 0), (70502, 70508, 0), (70512, 70516, 0), - (70712, 70719, 0), - (70722, 70724, 0), - (70726, 70726, 0), + (70709, 70726, 0), (70750, 70750, 0), - (70835, 70840, 0), - (70842, 70842, 0), - (70847, 70848, 0), - (70850, 70851, 0), - (71090, 71093, 0), - (71100, 71101, 0), - (71103, 71104, 0), + (70832, 70851, 0), + (71087, 71093, 0), + (71096, 71104, 0), (71132, 71133, 0), - (71219, 71226, 0), - (71229, 71229, 0), - (71231, 71232, 0), - (71339, 71339, 0), - (71341, 71341, 0), - (71344, 71349, 0), - (71351, 71351, 0), - (71453, 71455, 0), - (71458, 71461, 0), - (71463, 71467, 0), - (71727, 71735, 0), - (71737, 71738, 0), - (71995, 71996, 0), - (71998, 71998, 0), - (72003, 72003, 0), - (72148, 72151, 0), - (72154, 72155, 0), - (72160, 72160, 0), + (71216, 71232, 0), + (71339, 71351, 0), + (71453, 71467, 0), + (71724, 71738, 0), + (71984, 71989, 0), + (71991, 71992, 0), + (71995, 71998, 0), + (72000, 72000, 0), + (72002, 72003, 0), + (72145, 72151, 0), + (72154, 72160, 0), + (72164, 72164, 0), (72193, 72202, 0), - (72243, 72248, 0), + (72243, 72249, 0), (72251, 72254, 0), (72263, 72263, 0), - (72273, 72278, 0), - (72281, 72283, 0), - (72330, 72342, 0), - (72344, 72345, 0), - (72752, 72758, 0), - (72760, 72765, 0), - (72767, 72767, 0), + (72273, 72283, 0), + (72330, 72345, 0), + (72751, 72758, 0), + (72760, 72767, 0), (72850, 72871, 0), - (72874, 72880, 0), - (72882, 
72883, 0), - (72885, 72886, 0), + (72873, 72886, 0), (73009, 73014, 0), (73018, 73018, 0), (73020, 73021, 0), (73023, 73029, 0), (73031, 73031, 0), + (73098, 73102, 0), (73104, 73105, 0), - (73109, 73109, 0), - (73111, 73111, 0), - (73459, 73460, 0), + (73107, 73111, 0), + (73459, 73462, 0), + (73472, 73473, 0), + (73475, 73475, 0), + (73524, 73530, 0), + (73534, 73538, 0), + (78896, 78912, 0), + (78919, 78933, 0), (92912, 92916, 0), (92976, 92982, 0), (94031, 94031, 0), + (94033, 94087, 0), (94095, 94098, 0), (94176, 94179, 2), (94180, 94180, 0), - (94192, 94193, 2), + (94192, 94193, 0), (94208, 100343, 2), (100352, 101589, 2), (101632, 101640, 2), - (110592, 110878, 2), + (110576, 110579, 2), + (110581, 110587, 2), + (110589, 110590, 2), + (110592, 110882, 2), + (110898, 110898, 2), (110928, 110930, 2), + (110933, 110933, 2), (110948, 110951, 2), (110960, 111355, 2), (113821, 113822, 0), - (119143, 119145, 0), - (119163, 119170, 0), + (113824, 113827, 0), + (118528, 118573, 0), + (118576, 118598, 0), + (119141, 119145, 0), + (119149, 119170, 0), (119173, 119179, 0), (119210, 119213, 0), (119362, 119364, 0), @@ -395,8 +391,11 @@ (122907, 122913, 0), (122915, 122916, 0), (122918, 122922, 0), + (123023, 123023, 0), (123184, 123190, 0), + (123566, 123566, 0), (123628, 123631, 0), + (124140, 124143, 0), (125136, 125142, 0), (125252, 125258, 0), (126980, 126980, 2), @@ -416,7 +415,9 @@ (127951, 127955, 2), (127968, 127984, 2), (127988, 127988, 2), - (127992, 128062, 2), + (127992, 127994, 2), + (127995, 127999, 0), + (128000, 128062, 2), (128064, 128064, 2), (128066, 128252, 2), (128255, 128317, 2), @@ -430,22 +431,24 @@ (128716, 128716, 2), (128720, 128722, 2), (128725, 128727, 2), + (128732, 128735, 2), (128747, 128748, 2), (128756, 128764, 2), (128992, 129003, 2), + (129008, 129008, 2), (129292, 129338, 2), (129340, 129349, 2), - (129351, 129400, 2), - (129402, 129483, 2), - (129485, 129535, 2), - (129648, 129652, 2), - (129656, 129658, 2), - (129664, 129670, 2), - (129680, 129704, 2), - (129712, 129718, 2), - (129728, 129730, 2), - (129744, 129750, 2), + (129351, 129535, 2), + (129648, 129660, 2), + (129664, 129672, 2), + (129680, 129725, 2), + (129727, 129733, 2), + (129742, 129755, 2), + (129760, 129768, 2), + (129776, 129784, 2), (131072, 196605, 2), (196608, 262141, 2), + (917505, 917505, 0), + (917536, 917631, 0), (917760, 917999, 0), ] diff --git a/src/pip/_vendor/rich/text.py b/src/pip/_vendor/rich/text.py index 09f881e7296..209aa943483 100644 --- a/src/pip/_vendor/rich/text.py +++ b/src/pip/_vendor/rich/text.py @@ -38,7 +38,7 @@ _re_whitespace = re.compile(r"\s+$") TextType = Union[str, "Text"] -"""A plain string or a [Text][rich.text.Text] instance.""" +"""A plain string or a :class:`Text` instance.""" GetStyleCallable = Callable[[str], Optional[StyleType]] diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 15406823f3e..bba94517734 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -12,7 +12,7 @@ requests==2.31.0 chardet==5.2.0 idna==3.7 urllib3==1.26.18 -rich==13.7.0 +rich==13.7.1 pygments==2.17.2 typing_extensions==4.9.0 resolvelib==1.0.1 From f9d5e7d9f702472ec06f46aee3cd426439e2c7a9 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 21:05:34 +0100 Subject: [PATCH 6/7] Upgrade typing_extensions to 4.11.0 --- news/typing_extensions.vendor.rst | 2 +- src/pip/_vendor/typing_extensions.py | 433 ++++++++++++++++++++++----- src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 358 insertions(+), 79 deletions(-) diff --git 
a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst index d23d4bbced0..c605c366678 100644 --- a/news/typing_extensions.vendor.rst +++ b/news/typing_extensions.vendor.rst @@ -1 +1 @@ -Upgrade typing_extensions to 4.9.0 +Upgrade typing_extensions to 4.11.0 diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py index 351036faf7f..d60315a6adc 100644 --- a/src/pip/_vendor/typing_extensions.py +++ b/src/pip/_vendor/typing_extensions.py @@ -83,6 +83,7 @@ 'TypeAlias', 'TypeAliasType', 'TypeGuard', + 'TypeIs', 'TYPE_CHECKING', 'Never', 'NoReturn', @@ -146,27 +147,6 @@ def __repr__(self): _marker = _Sentinel() -def _check_generic(cls, parameters, elen=_marker): - """Check correct count for parameters of a generic cls (internal helper). - This gives a nice error message in case of count mismatch. - """ - if not elen: - raise TypeError(f"{cls} is not a generic class") - if elen is _marker: - if not hasattr(cls, "__parameters__") or not cls.__parameters__: - raise TypeError(f"{cls} is not a generic class") - elen = len(cls.__parameters__) - alen = len(parameters) - if alen != elen: - if hasattr(cls, "__parameters__"): - parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] - num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) - if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): - return - raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" - f" actual {alen}, expected {elen}") - - if sys.version_info >= (3, 10): def _should_collect_from_parameters(t): return isinstance( @@ -180,27 +160,6 @@ def _should_collect_from_parameters(t): return isinstance(t, typing._GenericAlias) and not t._special -def _collect_type_vars(types, typevar_types=None): - """Collect all type variable contained in types in order of - first appearance (lexicographic order). For example:: - - _collect_type_vars((T, List[S, T])) == (T, S) - """ - if typevar_types is None: - typevar_types = typing.TypeVar - tvars = [] - for t in types: - if ( - isinstance(t, typevar_types) and - t not in tvars and - not _is_unpack(t) - ): - tvars.append(t) - if _should_collect_from_parameters(t): - tvars.extend([t for t in t.__parameters__ if t not in tvars]) - return tuple(tvars) - - NoReturn = typing.NoReturn # Some unconstrained type variables. These are used by the container types. @@ -473,7 +432,7 @@ def clear_overloads(): "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", "__subclasshook__", "__orig_class__", "__init__", "__new__", - "__protocol_attrs__", "__callable_proto_members_only__", + "__protocol_attrs__", "__non_callable_proto_members__", "__match_args__", } @@ -521,6 +480,22 @@ def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') + def _type_check_issubclass_arg_1(arg): + """Raise TypeError if `arg` is not an instance of `type` + in `issubclass(arg, )`. + + In most cases, this is verified by type.__subclasscheck__. + Checking it again unnecessarily would slow down issubclass() checks, + so, we don't perform this check unless we absolutely have to. + + For various error paths, however, + we want to ensure that *this* error message is shown to the user + where relevant, rather than a typing.py-specific error message. + """ + if not isinstance(arg, type): + # Same error message as for issubclass(1, int). 
+ raise TypeError('issubclass() arg 1 must be a class') + # Inheriting from typing._ProtocolMeta isn't actually desirable, # but is necessary to allow typing.Protocol and typing_extensions.Protocol # to mix without getting TypeErrors about "metaclass conflict" @@ -551,11 +526,6 @@ def __init__(cls, *args, **kwargs): abc.ABCMeta.__init__(cls, *args, **kwargs) if getattr(cls, "_is_protocol", False): cls.__protocol_attrs__ = _get_protocol_attrs(cls) - # PEP 544 prohibits using issubclass() - # with protocols that have non-method members. - cls.__callable_proto_members_only__ = all( - callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ - ) def __subclasscheck__(cls, other): if cls is Protocol: @@ -564,26 +534,23 @@ def __subclasscheck__(cls, other): getattr(cls, '_is_protocol', False) and not _allow_reckless_class_checks() ): - if not isinstance(other, type): - # Same error message as for issubclass(1, int). - raise TypeError('issubclass() arg 1 must be a class') + if not getattr(cls, '_is_runtime_protocol', False): + _type_check_issubclass_arg_1(other) + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) if ( - not cls.__callable_proto_members_only__ + # this attribute is set by @runtime_checkable: + cls.__non_callable_proto_members__ and cls.__dict__.get("__subclasshook__") is _proto_hook ): - non_method_attrs = sorted( - attr for attr in cls.__protocol_attrs__ - if not callable(getattr(cls, attr, None)) - ) + _type_check_issubclass_arg_1(other) + non_method_attrs = sorted(cls.__non_callable_proto_members__) raise TypeError( "Protocols with non-method members don't support issubclass()." f" Non-method members: {str(non_method_attrs)[1:-1]}." ) - if not getattr(cls, '_is_runtime_protocol', False): - raise TypeError( - "Instance and class checks can only be used with " - "@runtime_checkable protocols" - ) return abc.ABCMeta.__subclasscheck__(cls, other) def __instancecheck__(cls, instance): @@ -610,7 +577,8 @@ def __instancecheck__(cls, instance): val = inspect.getattr_static(instance, attr) except AttributeError: break - if val is None and callable(getattr(cls, attr, None)): + # this attribute is set by @runtime_checkable: + if val is None and attr not in cls.__non_callable_proto_members__: break else: return True @@ -678,8 +646,58 @@ def __init_subclass__(cls, *args, **kwargs): cls.__init__ = _no_init +if sys.version_info >= (3, 13): + runtime_checkable = typing.runtime_checkable +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol. + + Such protocol can be used with isinstance() and issubclass(). + Raise TypeError if applied to a non-protocol class. + This allows a simple-minded structural check very similar to + one trick ponies in collections.abc such as Iterable. + + For example:: + + @runtime_checkable + class Closable(Protocol): + def close(self): ... + + assert isinstance(open('/some/file'), Closable) + + Warning: this will check only the presence of the required methods, + not their type signatures! + """ + if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): + raise TypeError('@runtime_checkable can be only applied to protocol classes,' + ' got %r' % cls) + cls._is_runtime_protocol = True + + # Only execute the following block if it's a typing_extensions.Protocol class. + # typing.Protocol classes don't need it. + if isinstance(cls, _ProtocolMeta): + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. 
+ # See gh-113320 for why we compute this attribute here, + # rather than in `_ProtocolMeta.__init__` + cls.__non_callable_proto_members__ = set() + for attr in cls.__protocol_attrs__: + try: + is_callable = callable(getattr(cls, attr, None)) + except Exception as e: + raise TypeError( + f"Failed to determine whether protocol member {attr!r} " + "is a method member" + ) from e + else: + if not is_callable: + cls.__non_callable_proto_members__.add(attr) + + return cls + + # The "runtime" alias exists for backwards compatibility. -runtime = runtime_checkable = typing.runtime_checkable +runtime = runtime_checkable # Our version of runtime-checkable protocols is faster on Python 3.8-3.11 @@ -774,7 +792,11 @@ def inner(func): return inner -if hasattr(typing, "ReadOnly"): +# Update this to something like >=3.13.0b1 if and when +# PEP 728 is implemented in CPython +_PEP_728_IMPLEMENTED = False + +if _PEP_728_IMPLEMENTED: # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" @@ -785,7 +807,8 @@ def inner(func): # Aaaand on 3.12 we add __orig_bases__ to TypedDict # to enable better runtime introspection. # On 3.13 we deprecate some odd ways of creating TypedDicts. - # PEP 705 proposes adding the ReadOnly[] qualifier. + # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. + # PEP 728 (still pending) makes more changes. TypedDict = typing.TypedDict _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict @@ -815,7 +838,7 @@ def _get_typeddict_qualifiers(annotation_type): break class _TypedDictMeta(type): - def __new__(cls, name, bases, ns, *, total=True): + def __new__(cls, name, bases, ns, *, total=True, closed=False): """Create new typed dict class object. 
This method is called when TypedDict is subclassed, @@ -860,6 +883,7 @@ def __new__(cls, name, bases, ns, *, total=True): optional_keys = set() readonly_keys = set() mutable_keys = set() + extra_items_type = None for base in bases: base_dict = base.__dict__ @@ -869,6 +893,26 @@ def __new__(cls, name, bases, ns, *, total=True): optional_keys.update(base_dict.get('__optional_keys__', ())) readonly_keys.update(base_dict.get('__readonly_keys__', ())) mutable_keys.update(base_dict.get('__mutable_keys__', ())) + base_extra_items_type = base_dict.get('__extra_items__', None) + if base_extra_items_type is not None: + extra_items_type = base_extra_items_type + + if closed and extra_items_type is None: + extra_items_type = Never + if closed and "__extra_items__" in own_annotations: + annotation_type = own_annotations.pop("__extra_items__") + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + if Required in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support " + "Required" + ) + if NotRequired in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support " + "NotRequired" + ) + extra_items_type = annotation_type annotations.update(own_annotations) for annotation_key, annotation_type in own_annotations.items(): @@ -883,11 +927,7 @@ def __new__(cls, name, bases, ns, *, total=True): else: optional_keys.add(annotation_key) if ReadOnly in qualifiers: - if annotation_key in mutable_keys: - raise TypeError( - f"Cannot override mutable key {annotation_key!r}" - " with read-only key" - ) + mutable_keys.discard(annotation_key) readonly_keys.add(annotation_key) else: mutable_keys.add(annotation_key) @@ -900,6 +940,8 @@ def __new__(cls, name, bases, ns, *, total=True): tp_dict.__mutable_keys__ = frozenset(mutable_keys) if not hasattr(tp_dict, '__total__'): tp_dict.__total__ = total + tp_dict.__closed__ = closed + tp_dict.__extra_items__ = extra_items_type return tp_dict __call__ = dict # static method @@ -913,7 +955,7 @@ def __subclasscheck__(cls, other): _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) @_ensure_subclassable(lambda bases: (_TypedDict,)) - def TypedDict(typename, fields=_marker, /, *, total=True, **kwargs): + def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs): """A simple typed namespace. At runtime it is equivalent to a plain dict. TypedDict creates a dictionary type such that a type checker will expect all @@ -973,6 +1015,9 @@ class Point2D(TypedDict): "using the functional syntax, pass an empty dictionary, e.g. " ) + example + "." warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + if closed is not False and closed is not True: + kwargs["closed"] = closed + closed = False fields = kwargs elif kwargs: raise TypeError("TypedDict takes either a dict or keyword arguments," @@ -994,7 +1039,7 @@ class Point2D(TypedDict): # Setting correct module is necessary to make typed dict classes pickleable. 
ns['__module__'] = module - td = _TypedDictMeta(typename, (), ns, total=total) + td = _TypedDictMeta(typename, (), ns, total=total, closed=closed) td.__orig_bases__ = (TypedDict,) return td @@ -1040,15 +1085,15 @@ def greet(name: str) -> None: return val -if hasattr(typing, "Required"): # 3.11+ +if hasattr(typing, "ReadOnly"): # 3.13+ get_type_hints = typing.get_type_hints -else: # <=3.10 +else: # <=3.13 # replaces _strip_annotations() def _strip_extras(t): """Strips Annotated, Required and NotRequired from a given type.""" if isinstance(t, _AnnotatedAlias): return _strip_extras(t.__origin__) - if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly): return _strip_extras(t.__args__[0]) if isinstance(t, typing._GenericAlias): stripped_args = tuple(_strip_extras(a) for a in t.__args__) @@ -1768,6 +1813,98 @@ def is_str(val: Union[str, float]): PEP 647 (User-Defined Type Guards). """) +# 3.13+ +if hasattr(typing, 'TypeIs'): + TypeIs = typing.TypeIs +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeIs(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeGuard`` and the argument's + previously known type. + + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.8 +else: + class _TypeIsForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeIs = _TypeIsForm( + 'TypeIs', + doc="""Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. 
The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeGuard`` and the argument's + previously known type. + + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """) + # Vendored from cpython typing._SpecialFrom class _SpecialForm(typing._Final, _root=True): @@ -2515,9 +2652,151 @@ def wrapper(*args, **kwargs): # counting generic parameters, so that when we subscript a generic, # the runtime doesn't try to substitute the Unpack with the subscripted type. if not hasattr(typing, "TypeVarTuple"): + def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. + if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if getattr(parameters[alen], '__default__', None) is not None: + return + + num_default_tv = sum(getattr(p, '__default__', None) + is not None for p in parameters) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + things = "arguments" if sys.version_info >= (3, 10) else "parameters" + raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}" + f" for {cls}; actual {alen}, expected {expect_val}") +else: + # Python 3.11+ + + def _check_generic(cls, parameters, elen): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. 
+ if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if getattr(parameters[alen], '__default__', None) is not None: + return + + num_default_tv = sum(getattr(p, '__default__', None) + is not None for p in parameters) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments" + f" for {cls}; actual {alen}, expected {expect_val}") + +typing._check_generic = _check_generic + +# Python 3.11+ _collect_type_vars was renamed to _collect_parameters +if hasattr(typing, '_collect_type_vars'): + def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + # required TypeVarLike cannot appear after TypeVarLike with default + default_encountered = False + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + if getattr(t, '__default__', None) is not None: + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') + + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + typing._collect_type_vars = _collect_type_vars - typing._check_generic = _check_generic +else: + def _collect_parameters(args): + """Collect all type variables and parameter specifications in args + in order of first appearance (lexicographic order). + + For example:: + + assert _collect_parameters((T, Callable[P, T])) == (T, P) + """ + parameters = [] + # required TypeVarLike cannot appear after TypeVarLike with default + default_encountered = False + for t in args: + if isinstance(t, type): + # We don't want __parameters__ descriptor of a bare Python class. + pass + elif isinstance(t, tuple): + # `t` might be a tuple, when `ParamSpec` is substituted with + # `[T, int]`, or `[int, *Ts]`, etc. + for x in t: + for collected in _collect_parameters([x]): + if collected not in parameters: + parameters.append(collected) + elif hasattr(t, '__typing_subst__'): + if t not in parameters: + if getattr(t, '__default__', None) is not None: + default_encountered = True + elif default_encountered: + raise TypeError(f'Type parameter {t!r} without a default' + ' follows type parameter with a default') + + parameters.append(t) + else: + for x in getattr(t, '__parameters__', ()): + if x not in parameters: + parameters.append(x) + + return tuple(parameters) + typing._collect_parameters = _collect_parameters # Backport typing.NamedTuple as it exists in Python 3.13. # In 3.11, the ability to define generic `NamedTuple`s was supported. 
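A minimal sketch of the ``TypeIs`` form that this typing_extensions upgrade backports (illustrative only, not part of the patch; it assumes the vendored module is importable as ``pip._vendor.typing_extensions``)::

    from typing import List

    from pip._vendor.typing_extensions import TypeIs

    def is_str_list(val: List[object]) -> TypeIs[List[str]]:
        # At runtime this is an ordinary boolean predicate; static type
        # checkers narrow `val` to List[str] in the True branch and to the
        # remainder of its declared type in the False branch (PEP 742).
        return all(isinstance(x, str) for x in val)

    def join_or_count(val: List[object]) -> str:
        if is_str_list(val):
            return ", ".join(val)  # val is List[str] here
        return f"{len(val)} non-string items"

Calling ``join_or_count(["a", "b"])`` or ``join_or_count([1, 2])`` behaves the same at runtime either way; the form only carries information for static checkers.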
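The backported ``_collect_type_vars``/``_collect_parameters`` above also enforce the PEP 696 ordering rule that a type parameter without a default may not follow one that has a default. A hedged sketch of how that surfaces in user code (again illustrative, not part of the patch)::

    from typing import Generic

    from pip._vendor.typing_extensions import TypeVar

    T = TypeVar("T", default=int)   # has a default
    U = TypeVar("U")                # no default

    try:
        class Pair(Generic[T, U]):  # U follows the defaulted T
            pass
    except TypeError as exc:
        # The patched collectors reject this ordering, e.g.
        # "Type parameter ~U without a default follows type parameter
        # with a default"
        print(exc)

The same subscription with the parameters swapped, ``Generic[U, T]``, is accepted.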
diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index bba94517734..04ccb0c5aa0 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -14,7 +14,7 @@ requests==2.31.0 urllib3==1.26.18 rich==13.7.1 pygments==2.17.2 - typing_extensions==4.9.0 + typing_extensions==4.11.0 resolvelib==1.0.1 setuptools==69.1.1 six==1.16.0 From d24ddc3267fded605ec8bf248f1d5923941d6bc8 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 3 May 2024 21:06:03 +0100 Subject: [PATCH 7/7] Upgrade setuptools to 69.5.1 --- news/setuptools.vendor.rst | 2 +- src/pip/_vendor/pkg_resources/__init__.py | 68 ++++++++++------------- src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 30 insertions(+), 42 deletions(-) diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst index 135850e0a97..70703ddc15e 100644 --- a/news/setuptools.vendor.rst +++ b/news/setuptools.vendor.rst @@ -1 +1 @@ -Upgrade setuptools to 69.1.1 +Upgrade setuptools to 69.5.1 diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 89f49570f4a..417a537d6f6 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -27,7 +27,7 @@ import time import re import types -from typing import Protocol +from typing import List, Protocol import zipfile import zipimport import warnings @@ -85,9 +85,7 @@ require = None working_set = None add_activation_listener = None -resources_stream = None cleanup_resources = None -resource_dir = None resource_stream = None set_extraction_path = None resource_isdir = None @@ -491,19 +489,6 @@ def compatible_platforms(provided, required): return False -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - - -# backward compatibility -run_main = run_script - - def get_distribution(dist): """Return a current distribution object for a Requirement or string""" if isinstance(dist, str): @@ -531,7 +516,7 @@ def get_entry_info(dist, group, name): class IMetadataProvider(Protocol): - def has_metadata(self, name): + def has_metadata(self, name) -> bool: """Does the package's distribution contain the named metadata?""" def get_metadata(self, name): @@ -543,7 +528,7 @@ def get_metadata_lines(self, name): Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.""" - def metadata_isdir(self, name): + def metadata_isdir(self, name) -> bool: """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" def metadata_listdir(self, name): @@ -566,8 +551,8 @@ def get_resource_stream(self, manager, resource_name): `manager` must be an ``IResourceManager``""" - def get_resource_string(self, manager, resource_name): - """Return a string containing the contents of `resource_name` + def get_resource_string(self, manager, resource_name) -> bytes: + """Return the contents of `resource_name` as :obj:`bytes` `manager` must be an ``IResourceManager``""" @@ -1203,8 +1188,8 @@ def resource_stream(self, package_or_requirement, resource_name): self, resource_name ) - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" + def resource_string(self, package_or_requirement, resource_name) -> bytes: + """Return specified resource as :obj:`bytes`""" return get_provider(package_or_requirement).get_resource_string( self, resource_name ) @@ -1339,7 +1324,7 @@ def set_extraction_path(self, path): self.extraction_path = path - def cleanup_resources(self, force=False): + def cleanup_resources(self, force=False) -> List[str]: """ Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. @@ -1351,6 +1336,7 @@ def cleanup_resources(self, force=False): directory used for extractions. """ # XXX + return [] def get_default_cache(): @@ -1479,7 +1465,7 @@ def get_resource_filename(self, manager, resource_name): def get_resource_stream(self, manager, resource_name): return io.BytesIO(self.get_resource_string(manager, resource_name)) - def get_resource_string(self, manager, resource_name): + def get_resource_string(self, manager, resource_name) -> bytes: return self._get(self._fn(self.module_path, resource_name)) def has_resource(self, resource_name): @@ -1488,9 +1474,9 @@ def has_resource(self, resource_name): def _get_metadata_path(self, name): return self._fn(self.egg_info, name) - def has_metadata(self, name): + def has_metadata(self, name) -> bool: if not self.egg_info: - return self.egg_info + return False path = self._get_metadata_path(name) return self._has(path) @@ -1514,8 +1500,8 @@ def get_metadata_lines(self, name): def resource_isdir(self, resource_name): return self._isdir(self._fn(self.module_path, resource_name)) - def metadata_isdir(self, name): - return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + def metadata_isdir(self, name) -> bool: + return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name))) def resource_listdir(self, resource_name): return self._listdir(self._fn(self.module_path, resource_name)) @@ -1554,12 +1540,12 @@ def run_script(self, script_name, namespace): script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) - def _has(self, path): + def _has(self, path) -> bool: raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) - def _isdir(self, path): + def _isdir(self, path) -> bool: raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) @@ -1649,7 +1635,7 @@ def _validate_resource_path(path): DeprecationWarning, ) - def _get(self, path): + def _get(self, path) -> bytes: if hasattr(self.loader, 'get_data'): return self.loader.get_data(path) raise NotImplementedError( @@ -1694,10 +1680,10 @@ def _set_egg(self, path): class DefaultProvider(EggProvider): """Provides access to package resources in the filesystem""" - def _has(self, path): + def _has(self, path) -> bool: return 
os.path.exists(path) - def _isdir(self, path): + def _isdir(self, path) -> bool: return os.path.isdir(path) def _listdir(self, path): @@ -1706,7 +1692,7 @@ def _listdir(self, path): def get_resource_stream(self, manager, resource_name): return open(self._fn(self.module_path, resource_name), 'rb') - def _get(self, path): + def _get(self, path) -> bytes: with open(path, 'rb') as stream: return stream.read() @@ -1731,8 +1717,8 @@ class EmptyProvider(NullProvider): _isdir = _has = lambda self, path: False - def _get(self, path): - return '' + def _get(self, path) -> bytes: + return b'' def _listdir(self, path): return [] @@ -1939,11 +1925,11 @@ def _index(self): self._dirindex = ind return ind - def _has(self, fspath): + def _has(self, fspath) -> bool: zip_path = self._zipinfo_name(fspath) return zip_path in self.zipinfo or zip_path in self._index() - def _isdir(self, fspath): + def _isdir(self, fspath) -> bool: return self._zipinfo_name(fspath) in self._index() def _listdir(self, fspath): @@ -1977,7 +1963,7 @@ def __init__(self, path): def _get_metadata_path(self, name): return self.path - def has_metadata(self, name): + def has_metadata(self, name) -> bool: return name == 'PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): @@ -3207,7 +3193,9 @@ def _find_adapter(registry, ob): for t in types: if t in registry: return registry[t] - return None + # _find_adapter would previously return None, and immediately be called. + # So we're raising a TypeError to keep backward compatibility if anyone depended on that behaviour. + raise TypeError(f"Could not find adapter for {registry} and {ob}") def ensure_directory(path): diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 04ccb0c5aa0..e9694adc037 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==13.7.1 pygments==2.17.2 typing_extensions==4.11.0 resolvelib==1.0.1 -setuptools==69.1.1 +setuptools==69.5.1 six==1.16.0 tenacity==8.2.3 tomli==2.0.1
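Two behavioural details of the pkg_resources refresh above that callers may notice: resource contents are now consistently ``bytes`` (``EmptyProvider._get`` returns ``b''`` instead of ``''``), and the internal ``_find_adapter`` helper now raises an explicit ``TypeError`` where it previously returned ``None`` that the caller would immediately try to call. A small illustrative sketch (not part of the patch; it assumes the vendored copy is importable as ``pip._vendor.pkg_resources`` and that ``pip`` is installed as a normal, unzipped package)::

    from pip._vendor import pkg_resources

    # resource_string() is annotated -> bytes, and every provider now agrees.
    data = pkg_resources.resource_string("pip", "__init__.py")
    assert isinstance(data, bytes)

    # _find_adapter() with no matching adapter used to return None, which the
    # caller then tried to call; it now fails with an explicit TypeError.
    try:
        pkg_resources._find_adapter({}, object())
    except TypeError as exc:
        print(exc)  # e.g. "Could not find adapter for {} and <object ...>"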