From 3f9f359c0e563d30a60f0d1e4633eea9f964b610 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Mon, 27 Apr 2020 15:01:26 -0400
Subject: [PATCH 01/12] Update pip and pip-tools to latest versions

- Update `pip => 20.0.2`
- Update `pip-tools => 5.0.0`
- Update relevant patches and re-apply

Signed-off-by: Dan Ryan
---
 pipenv/patched/notpip/__init__.py | 19 +-
 pipenv/patched/notpip/__main__.py | 2 +-
 pipenv/patched/notpip/_internal/__init__.py | 16 +
 pipenv/patched/notpip/_internal/build_env.py | 2 +-
 pipenv/patched/notpip/_internal/cache.py | 140 ++-
 .../notpip/_internal/cli/autocompletion.py | 43 +-
 .../notpip/_internal/cli/base_command.py | 43 +-
 .../notpip/_internal/cli/cmdoptions.py | 72 +-
 .../notpip/_internal/cli/command_context.py | 13 +-
 pipenv/patched/notpip/_internal/cli/main.py | 75 ++
 .../notpip/_internal/cli/req_command.py | 61 +-
 .../notpip/_internal/commands/debug.py | 35 +-
 .../notpip/_internal/commands/download.py | 27 +-
 .../notpip/_internal/commands/install.py | 276 +++--
 .../patched/notpip/_internal/commands/list.py | 2 +-
 .../notpip/_internal/commands/wheel.py | 53 +-
 .../patched/notpip/_internal/configuration.py | 2 +-
 .../_internal/distributions/__init__.py | 2 +-
 .../notpip/_internal/distributions/base.py | 15 +-
 .../_internal/distributions/installed.py | 12 +-
 .../notpip/_internal/distributions/sdist.py | 104 ++
 .../notpip/_internal/distributions/wheel.py | 28 +-
 .../notpip/_internal/index/__init__.py | 2 +
 .../notpip/_internal/index/collector.py | 544 +++++++++
 .../notpip/_internal/index/package_finder.py | 1049 +++++++++++++++++
 .../notpip/_internal/legacy_resolve.py | 59 +-
 pipenv/patched/notpip/_internal/locations.py | 78 +-
 pipenv/patched/notpip/_internal/main.py | 51 +-
 .../notpip/_internal/models/candidate.py | 17 +-
 .../notpip/_internal/models/format_control.py | 10 +-
 .../patched/notpip/_internal/models/link.py | 10 +-
 .../patched/notpip/_internal/models/scheme.py | 25 +
 .../notpip/_internal/models/search_scope.py | 8 +-
 .../notpip/_internal/models/target_python.py | 13 +-
 .../patched/notpip/_internal/models/wheel.py | 78 ++
 .../patched/notpip/_internal/network/cache.py | 6 +
 .../notpip/_internal/network/download.py | 200 ++++
 .../notpip/_internal/network/session.py | 41 +-
 .../patched/notpip/_internal/network/utils.py | 48 +
 .../notpip/_internal/operations/check.py | 2 +-
 .../notpip/_internal/operations/freeze.py | 38 +-
 .../_internal/operations/install/__init__.py | 2 +
 .../operations/install/editable_legacy.py | 52 +
 .../_internal/operations/install/legacy.py | 129 ++
 .../_internal/operations/install/wheel.py | 615 ++++++++++
 .../notpip/_internal/operations/prepare.py | 382 +++++-
 pipenv/patched/notpip/_internal/pep425tags.py | 516 ++------
 pipenv/patched/notpip/_internal/pyproject.py | 31 +-
 .../patched/notpip/_internal/req/__init__.py | 30 +-
 .../notpip/_internal/req/constructors.py | 4 +-
 .../patched/notpip/_internal/req/req_file.py | 381 ++++--
 .../notpip/_internal/req/req_install.py | 538 ++++-----
 .../patched/notpip/_internal/req/req_set.py | 7 +-
 .../notpip/_internal/req/req_tracker.py | 112 +-
 .../notpip/_internal/req/req_uninstall.py | 4 +-
 .../notpip/_internal/self_outdated_check.py | 16 +-
 .../patched/notpip/_internal/utils/appdirs.py | 265 +----
 .../patched/notpip/_internal/utils/compat.py | 68 +-
 .../notpip/_internal/utils/distutils_args.py | 48 +
 .../notpip/_internal/utils/entrypoints.py | 31 +
 .../notpip/_internal/utils/filesystem.py | 62 +-
 .../patched/notpip/_internal/utils/glibc.py | 31 +-
 .../patched/notpip/_internal/utils/hashes.py | 4 +-
 .../notpip/_internal/utils/marker_files.py | 4 +-
 pipenv/patched/notpip/_internal/utils/misc.py | 66 +-
 .../notpip/_internal/utils/pkg_resources.py | 44 +
 .../_internal/utils/setuptools_build.py | 152 ++-
 .../notpip/_internal/utils/subprocess.py | 2 +-
 .../notpip/_internal/utils/temp_dir.py | 100 +-
 .../patched/notpip/_internal/utils/typing.py | 9 +
 pipenv/patched/notpip/_internal/utils/ui.py | 6 +-
 .../notpip/_internal/utils/virtualenv.py | 115 +-
 .../patched/notpip/_internal/utils/wheel.py | 225 ++++
 pipenv/patched/notpip/_internal/vcs/git.py | 19 +-
 .../notpip/_internal/vcs/versioncontrol.py | 45 +-
 .../patched/notpip/_internal/wheel_builder.py | 305 +++++
 pipenv/patched/notpip/_vendor/README.rst | 151 +++
 pipenv/patched/notpip/_vendor/appdirs.py | 47 +-
 .../patched/notpip/_vendor/cachecontrol.pyi | 1 +
 .../notpip/_vendor/cachecontrol/__init__.py | 2 +-
 .../notpip/_vendor/cachecontrol/adapter.py | 2 +-
 .../notpip/_vendor/cachecontrol/controller.py | 11 +-
 .../notpip/_vendor/cachecontrol/serialize.py | 4 +-
 .../notpip/_vendor/cachecontrol/wrapper.py | 2 +-
 pipenv/patched/notpip/_vendor/certifi.pyi | 1 +
 .../notpip/_vendor/certifi/__init__.py | 2 +-
 .../patched/notpip/_vendor/certifi/cacert.pem | 44 +
 pipenv/patched/notpip/_vendor/chardet.pyi | 1 +
 pipenv/patched/notpip/_vendor/colorama.pyi | 1 +
 .../notpip/_vendor/colorama/__init__.py | 2 +-
 pipenv/patched/notpip/_vendor/distlib.pyi | 1 +
 .../notpip/_vendor/distlib/__init__.py | 2 +-
 .../_vendor/distlib/_backport/sysconfig.py | 8 +-
 .../notpip/_vendor/distlib/database.py | 2 +-
 .../notpip/_vendor/distlib/locators.py | 21 +-
 .../patched/notpip/_vendor/distlib/scripts.py | 25 +-
 pipenv/patched/notpip/_vendor/distlib/t32.exe | Bin 92672 -> 96768 bytes
 pipenv/patched/notpip/_vendor/distlib/t64.exe | Bin 102912 -> 105984 bytes
 pipenv/patched/notpip/_vendor/distlib/util.py | 5 +-
 pipenv/patched/notpip/_vendor/distlib/w32.exe | Bin 89088 -> 90112 bytes
 pipenv/patched/notpip/_vendor/distlib/w64.exe | Bin 99840 -> 99840 bytes
 .../patched/notpip/_vendor/distlib/wheel.py | 2 +-
 pipenv/patched/notpip/_vendor/distro.pyi | 1 +
 pipenv/patched/notpip/_vendor/html5lib.pyi | 1 +
 pipenv/patched/notpip/_vendor/idna.pyi | 1 +
 pipenv/patched/notpip/_vendor/ipaddress.py | 5 +-
 pipenv/patched/notpip/_vendor/ipaddress.pyi | 1 +
 pipenv/patched/notpip/_vendor/msgpack.pyi | 1 +
 pipenv/patched/notpip/_vendor/packaging.pyi | 1 +
 .../notpip/_vendor/packaging/LICENSE.APACHE | 2 +-
 .../notpip/_vendor/packaging/__about__.py | 2 +-
 .../notpip/_vendor/packaging/_compat.py | 9 +-
 .../notpip/_vendor/packaging/_structures.py | 26 +-
 .../notpip/_vendor/packaging/_typing.py | 39 +
 .../notpip/_vendor/packaging/markers.py | 54 +-
 .../patched/notpip/_vendor/packaging/py.typed | 0
 .../notpip/_vendor/packaging/requirements.py | 9 +-
 .../notpip/_vendor/packaging/specifiers.py | 168 ++-
 .../patched/notpip/_vendor/packaging/tags.py | 542 +++++++--
 .../patched/notpip/_vendor/packaging/utils.py | 13 +-
 .../notpip/_vendor/packaging/version.py | 151 ++-
 pipenv/patched/notpip/_vendor/pep517.pyi | 1 +
 .../patched/notpip/_vendor/pkg_resources.pyi | 1 +
 .../notpip/_vendor/pkg_resources/__init__.py | 7 +-
 pipenv/patched/notpip/_vendor/progress.pyi | 1 +
 pipenv/patched/notpip/_vendor/pyparsing.py | 280 ++++-
 pipenv/patched/notpip/_vendor/pyparsing.pyi | 1 +
 pipenv/patched/notpip/_vendor/pytoml.pyi | 1 +
 pipenv/patched/notpip/_vendor/requests.pyi | 1 +
 pipenv/patched/notpip/_vendor/retrying.pyi | 1 +
 pipenv/patched/notpip/_vendor/six.LICENSE | 2 +-
 pipenv/patched/notpip/_vendor/six.py | 70 +-
 .../patched/notpip/_vendor/six/__init__.pyi | 1 +
 .../notpip/_vendor/six/moves/__init__.pyi | 1 +
 .../notpip/_vendor/six/moves/configparser.pyi | 1 +
 pipenv/patched/notpip/_vendor/urllib3.pyi | 1 +
 .../notpip/_vendor/urllib3/__init__.py | 2 +-
 .../notpip/_vendor/urllib3/connection.py | 4 +-
 .../notpip/_vendor/urllib3/connectionpool.py | 14 +-
 .../urllib3/contrib/_appengine_environ.py | 24 +-
 .../contrib/_securetransport/bindings.py | 1 +
 .../_vendor/urllib3/contrib/appengine.py | 7 -
 .../_vendor/urllib3/contrib/ntlmpool.py | 4 +-
 .../urllib3/contrib/securetransport.py | 19 +-
 .../notpip/_vendor/urllib3/exceptions.py | 2 +-
 .../ssl_match_hostname/_implementation.py | 6 +-
 .../notpip/_vendor/urllib3/util/connection.py | 2 +-
 .../notpip/_vendor/urllib3/util/request.py | 4 +-
 .../notpip/_vendor/urllib3/util/timeout.py | 2 +-
 .../notpip/_vendor/urllib3/util/url.py | 7 +-
 pipenv/patched/notpip/_vendor/vendor.txt | 20 +-
 .../patched/notpip/_vendor/webencodings.pyi | 1 +
 pipenv/patched/piptools/_compat/__init__.py | 4 +-
 pipenv/patched/piptools/_compat/pip_compat.py | 51 +-
 pipenv/patched/piptools/cache.py | 23 +-
 pipenv/patched/piptools/locations.py | 1 +
 pipenv/patched/piptools/logging.py | 6 +
 pipenv/patched/piptools/repositories/local.py | 17 +-
 pipenv/patched/piptools/repositories/pypi.py | 214 ++--
 pipenv/patched/piptools/resolver.py | 43 +-
 pipenv/patched/piptools/scripts/compile.py | 114 +-
 pipenv/patched/piptools/scripts/sync.py | 111 +-
 pipenv/patched/piptools/sync.py | 29 +-
 pipenv/patched/piptools/utils.py | 59 +-
 pipenv/patched/piptools/writer.py | 77 +-
 165 files changed, 7816 insertions(+), 2526 deletions(-)
 create mode 100644 pipenv/patched/notpip/_internal/cli/main.py
 create mode 100644 pipenv/patched/notpip/_internal/distributions/sdist.py
 create mode 100644 pipenv/patched/notpip/_internal/index/__init__.py
 create mode 100644 pipenv/patched/notpip/_internal/index/collector.py
 create mode 100644 pipenv/patched/notpip/_internal/index/package_finder.py
 create mode 100644 pipenv/patched/notpip/_internal/models/scheme.py
 create mode 100644 pipenv/patched/notpip/_internal/models/wheel.py
 create mode 100644 pipenv/patched/notpip/_internal/network/download.py
 create mode 100644 pipenv/patched/notpip/_internal/network/utils.py
 create mode 100644 pipenv/patched/notpip/_internal/operations/install/__init__.py
 create mode 100644 pipenv/patched/notpip/_internal/operations/install/editable_legacy.py
 create mode 100644 pipenv/patched/notpip/_internal/operations/install/legacy.py
 create mode 100644 pipenv/patched/notpip/_internal/operations/install/wheel.py
 create mode 100644 pipenv/patched/notpip/_internal/utils/distutils_args.py
 create mode 100644 pipenv/patched/notpip/_internal/utils/entrypoints.py
 create mode 100644 pipenv/patched/notpip/_internal/utils/pkg_resources.py
 create mode 100644 pipenv/patched/notpip/_internal/utils/wheel.py
 create mode 100644 pipenv/patched/notpip/_internal/wheel_builder.py
 create mode 100644 pipenv/patched/notpip/_vendor/README.rst
 create mode 100644 pipenv/patched/notpip/_vendor/cachecontrol.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/certifi.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/chardet.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/colorama.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/distlib.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/distro.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/html5lib.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/idna.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/ipaddress.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/msgpack.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/packaging.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/packaging/_typing.py
 create mode 100644 pipenv/patched/notpip/_vendor/packaging/py.typed
 create mode 100644 pipenv/patched/notpip/_vendor/pep517.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/pkg_resources.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/progress.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/pyparsing.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/pytoml.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/requests.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/retrying.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/six/__init__.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/six/moves/__init__.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/six/moves/configparser.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/urllib3.pyi
 create mode 100644 pipenv/patched/notpip/_vendor/webencodings.pyi
 mode change 100644 => 100755 pipenv/patched/piptools/scripts/compile.py
 mode change 100644 => 100755 pipenv/patched/piptools/scripts/sync.py

diff --git a/pipenv/patched/notpip/__init__.py b/pipenv/patched/notpip/__init__.py
index a487794a9b..7230152a8f 100644
--- a/pipenv/patched/notpip/__init__.py
+++ b/pipenv/patched/notpip/__init__.py
@@ -1 +1,18 @@
-__version__ = "19.3.1"
+from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import List, Optional
+
+
+__version__ = "20.0.2"
+
+
+def main(args=None):
+    # type: (Optional[List[str]]) -> int
+    """This is an internal API only meant for use by pip's own console scripts.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pipenv.patched.notpip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/pipenv/patched/notpip/__main__.py b/pipenv/patched/notpip/__main__.py
index 36a4800f13..56f669fafa 100644
--- a/pipenv/patched/notpip/__main__.py
+++ b/pipenv/patched/notpip/__main__.py
@@ -13,7 +13,7 @@
     path = os.path.dirname(os.path.dirname(__file__))
     sys.path.insert(0, path)
 
-from pipenv.patched.notpip._internal.main import main as _main  # isort:skip  # noqa
+from pipenv.patched.notpip._internal.cli.main import main as _main  # isort:skip  # noqa
 
 if __name__ == '__main__':
     sys.exit(_main())
diff --git a/pipenv/patched/notpip/_internal/__init__.py b/pipenv/patched/notpip/_internal/__init__.py
index 18d727b653..18f675de9a 100644
--- a/pipenv/patched/notpip/_internal/__init__.py
+++ b/pipenv/patched/notpip/_internal/__init__.py
@@ -1,2 +1,18 @@
 #!/usr/bin/env python
 import pipenv.patched.notpip._internal.utils.inject_securetransport  # noqa
+from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import Optional, List
+
+
+def main(args=None):
+    # type: (Optional[List[str]]) -> int
+    """This is preserved for old console scripts that may still be referencing
+    it.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+ """ + from pipenv.patched.notpip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/pipenv/patched/notpip/_internal/build_env.py b/pipenv/patched/notpip/_internal/build_env.py index 7760b5210a..71fa326e61 100644 --- a/pipenv/patched/notpip/_internal/build_env.py +++ b/pipenv/patched/notpip/_internal/build_env.py @@ -23,7 +23,7 @@ if MYPY_CHECK_RUNNING: from typing import Tuple, Set, Iterable, Optional, List - from pipenv.patched.notpip._internal.index import PackageFinder + from pipenv.patched.notpip._internal.index.package_finder import PackageFinder logger = logging.getLogger(__name__) diff --git a/pipenv/patched/notpip/_internal/cache.py b/pipenv/patched/notpip/_internal/cache.py index 9d241eca8f..c6ed6346e0 100644 --- a/pipenv/patched/notpip/_internal/cache.py +++ b/pipenv/patched/notpip/_internal/cache.py @@ -4,28 +4,38 @@ # The following comment should be removed at some point in the future. # mypy: strict-optional=False -import errno import hashlib +import json import logging import os +from pipenv.patched.notpip._vendor.packaging.tags import interpreter_name, interpreter_version from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name +from pipenv.patched.notpip._internal.exceptions import InvalidWheelFilename from pipenv.patched.notpip._internal.models.link import Link -from pipenv.patched.notpip._internal.utils.compat import expanduser +from pipenv.patched.notpip._internal.models.wheel import Wheel from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING from pipenv.patched.notpip._internal.utils.urls import path_to_url -from pipenv.patched.notpip._internal.wheel import InvalidWheelFilename, Wheel if MYPY_CHECK_RUNNING: - from typing import Optional, Set, List, Any - from pipenv.patched.notpip._internal.index import FormatControl - from pipenv.patched.notpip._internal.pep425tags import Pep425Tag + from typing import Optional, Set, List, Any, Dict + + from pipenv.patched.notpip._vendor.packaging.tags import Tag + + from pipenv.patched.notpip._internal.models.format_control import FormatControl logger = logging.getLogger(__name__) +def _hash_dict(d): + # type: (Dict[str, str]) -> str + """Return a stable sha224 of a dictionary.""" + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) + return hashlib.sha224(s.encode("ascii")).hexdigest() + + class Cache(object): """An abstract class - provides cache directories for data from links @@ -40,16 +50,19 @@ class Cache(object): def __init__(self, cache_dir, format_control, allowed_formats): # type: (str, FormatControl, Set[str]) -> None super(Cache, self).__init__() - self.cache_dir = expanduser(cache_dir) if cache_dir else None + assert not cache_dir or os.path.isabs(cache_dir) + self.cache_dir = cache_dir or None self.format_control = format_control self.allowed_formats = allowed_formats _valid_formats = {"source", "binary"} assert self.allowed_formats.union(_valid_formats) == _valid_formats - def _get_cache_path_parts(self, link): + def _get_cache_path_parts_legacy(self, link): # type: (Link) -> List[str] """Get parts of part that must be os.path.joined with cache_dir + + Legacy cache key (pip < 20) for compatibility with older caches. 
""" # We want to generate an url to use as our cache key, we don't want to @@ -73,30 +86,72 @@ def _get_cache_path_parts(self, link): return parts - def _get_candidates(self, link, package_name): + def _get_cache_path_parts(self, link): + # type: (Link) -> List[str] + """Get parts of part that must be os.path.joined with cache_dir + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. + key_parts = {"url": link.url_without_fragment} + if link.hash_name is not None and link.hash is not None: + key_parts[link.hash_name] = link.hash + if link.subdirectory_fragment: + key_parts["subdirectory"] = link.subdirectory_fragment + + # Include interpreter name, major and minor version in cache key + # to cope with ill-behaved sdists that build a different wheel + # depending on the python version their setup.py is being run on, + # and don't encode the difference in compatibility tags. + # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link, canonical_package_name): # type: (Link, Optional[str]) -> List[Any] can_not_cache = ( not self.cache_dir or - not package_name or + not canonical_package_name or not link ) if can_not_cache: return [] - canonical_name = canonicalize_name(package_name) formats = self.format_control.get_allowed_formats( - canonical_name + canonical_package_name ) if not self.allowed_formats.intersection(formats): return [] - root = self.get_path_for_link(link) - try: - return os.listdir(root) - except OSError as err: - if err.errno in {errno.ENOENT, errno.ENOTDIR}: - return [] - raise + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + # TODO remove legacy path lookup in pip>=21 + legacy_path = self.get_path_for_link_legacy(link) + if os.path.isdir(legacy_path): + for candidate in os.listdir(legacy_path): + candidates.append((candidate, legacy_path)) + return candidates + + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + raise NotImplementedError() def get_path_for_link(self, link): # type: (Link) -> str @@ -108,7 +163,7 @@ def get( self, link, # type: Link package_name, # type: Optional[str] - supported_tags, # type: List[Pep425Tag] + supported_tags, # type: List[Tag] ): # type: (...) 
-> Link """Returns a link to a cached item if it exists, otherwise returns the @@ -116,13 +171,6 @@ def get( """ raise NotImplementedError() - def _link_for_candidate(self, link, candidate): - # type: (Link, str) -> Link - root = self.get_path_for_link(link) - path = os.path.join(root, candidate) - - return Link(path_to_url(path)) - def cleanup(self): # type: () -> None pass @@ -138,6 +186,11 @@ def __init__(self, cache_dir, format_control): cache_dir, format_control, {"binary"} ) + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + parts = self._get_cache_path_parts_legacy(link) + return os.path.join(self.cache_dir, "wheels", *parts) + def get_path_for_link(self, link): # type: (Link) -> str """Return a directory to store cached wheels for link @@ -163,27 +216,46 @@ def get( self, link, # type: Link package_name, # type: Optional[str] - supported_tags, # type: List[Pep425Tag] + supported_tags, # type: List[Tag] ): # type: (...) -> Link candidates = [] - for wheel_name in self._get_candidates(link, package_name): + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates( + link, canonical_package_name + ): try: wheel = Wheel(wheel_name) except InvalidWheelFilename: continue + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel {} for {} as it " + "does not match the expected distribution name {}.".format( + wheel_name, link, package_name + ) + ) + continue if not wheel.supported(supported_tags): # Built for a different python/arch/etc continue candidates.append( - (wheel.support_index_min(supported_tags), wheel_name) + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) ) if not candidates: return link - return self._link_for_candidate(link, min(candidates)[1]) + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) class EphemWheelCache(SimpleWheelCache): @@ -218,6 +290,10 @@ def __init__(self, cache_dir, format_control): self._wheel_cache = SimpleWheelCache(cache_dir, format_control) self._ephem_cache = EphemWheelCache(format_control) + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + return self._wheel_cache.get_path_for_link_legacy(link) + def get_path_for_link(self, link): # type: (Link) -> str return self._wheel_cache.get_path_for_link(link) @@ -230,7 +306,7 @@ def get( self, link, # type: Link package_name, # type: Optional[str] - supported_tags, # type: List[Pep425Tag] + supported_tags, # type: List[Tag] ): # type: (...) -> Link retval = self._wheel_cache.get( diff --git a/pipenv/patched/notpip/_internal/cli/autocompletion.py b/pipenv/patched/notpip/_internal/cli/autocompletion.py index d8e657096c..af4d1a519e 100644 --- a/pipenv/patched/notpip/_internal/cli/autocompletion.py +++ b/pipenv/patched/notpip/_internal/cli/autocompletion.py @@ -1,19 +1,22 @@ """Logic that powers autocompletion installed by ``pip completion``. """ -# The following comment should be removed at some point in the future. 
-# mypy: disallow-untyped-defs=False - import optparse import os import sys +from itertools import chain from pipenv.patched.notpip._internal.cli.main_parser import create_main_parser from pipenv.patched.notpip._internal.commands import commands_dict, create_command from pipenv.patched.notpip._internal.utils.misc import get_installed_distributions +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Iterable, List, Optional def autocomplete(): + # type: () -> None """Entry Point for completion of main and subcommand options. """ # Don't complete if user hasn't sourced bash_completion file. @@ -26,17 +29,18 @@ def autocomplete(): except IndexError: current = '' + parser = create_main_parser() subcommands = list(commands_dict) options = [] - # subcommand - try: - subcommand_name = [w for w in cwords if w in subcommands][0] - except IndexError: - subcommand_name = None - parser = create_main_parser() + # subcommand + subcommand_name = None # type: Optional[str] + for word in cwords: + if word in subcommands: + subcommand_name = word + break # subcommand options - if subcommand_name: + if subcommand_name is not None: # special case: 'help' subcommand has no options if subcommand_name == 'help': sys.exit(1) @@ -76,8 +80,8 @@ def autocomplete(): # get completion files and directories if ``completion_type`` is # ````, ```` or ```` if completion_type: - options = auto_complete_paths(current, completion_type) - options = ((opt, 0) for opt in options) + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] for option in options: opt_label = option[0] # append '=' to options which require args @@ -89,22 +93,25 @@ def autocomplete(): opts = [i.option_list for i in parser.option_groups] opts.append(parser.option_list) - opts = (o for it in opts for o in it) + flattened_opts = chain.from_iterable(opts) if current.startswith('-'): - for opt in opts: + for opt in flattened_opts: if opt.help != optparse.SUPPRESS_HELP: subcommands += opt._long_opts + opt._short_opts else: # get completion type given cwords and all available options - completion_type = get_path_completion_type(cwords, cword, opts) + completion_type = get_path_completion_type(cwords, cword, + flattened_opts) if completion_type: - subcommands = auto_complete_paths(current, completion_type) + subcommands = list(auto_complete_paths(current, + completion_type)) print(' '.join([x for x in subcommands if x.startswith(current)])) sys.exit(1) def get_path_completion_type(cwords, cword, opts): + # type: (List[str], int, Iterable[Any]) -> Optional[str] """Get the type of path completion (``file``, ``dir``, ``path`` or None) :param cwords: same as the environmental variable ``COMP_WORDS`` @@ -113,7 +120,7 @@ def get_path_completion_type(cwords, cword, opts): :return: path completion type (``file``, ``dir``, ``path`` or None) """ if cword < 2 or not cwords[cword - 2].startswith('-'): - return + return None for opt in opts: if opt.help == optparse.SUPPRESS_HELP: continue @@ -123,9 +130,11 @@ def get_path_completion_type(cwords, cword, opts): x in ('path', 'file', 'dir') for x in opt.metavar.split('/')): return opt.metavar + return None def auto_complete_paths(current, completion_type): + # type: (str, str) -> Iterable[str] """If ``completion_type`` is ``file`` or ``path``, list all regular files and directories starting with ``current``; otherwise only list directories starting with ``current``. 
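
The cache.py hunks above replace the old URL-derived cache paths with a hashed key: the link URL, any hash and subdirectory fragments, and the interpreter name and version (added for pypa/pip#7296) are serialized to JSON and digested with sha224, and the digest is split into nested directories. A minimal standalone sketch of that scheme, assuming example values for the interpreter fields that pip normally reads from packaging.tags:

    import hashlib
    import json

    def cache_path_parts(url, interpreter_name="cp", interpreter_version="38"):
        # Key parts mirror _get_cache_path_parts() above; the interpreter
        # values here are assumed examples, not queried from the runtime.
        key_parts = {
            "url": url,
            "interpreter_name": interpreter_name,
            "interpreter_version": interpreter_version,
        }
        # Stable serialization followed by sha224, as in _hash_dict() above.
        serialized = json.dumps(key_parts, sort_keys=True,
                                separators=(",", ":"), ensure_ascii=True)
        hashed = hashlib.sha224(serialized.encode("ascii")).hexdigest()
        # Nest the directories to avoid one huge flat directory.
        return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    # e.g. "ab/cd/ef/<rest>" under <cache_dir>/wheels/
    print("/".join(cache_path_parts("https://example.com/pkg-1.0.tar.gz")))

Note that _get_candidates() still consults the legacy layout via get_path_for_link_legacy(), so wheels cached by pip < 20 stay usable until that fallback is removed in pip >= 21.
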
diff --git a/pipenv/patched/notpip/_internal/cli/base_command.py b/pipenv/patched/notpip/_internal/cli/base_command.py index dd818fe0b6..f8eeb088c9 100644 --- a/pipenv/patched/notpip/_internal/cli/base_command.py +++ b/pipenv/patched/notpip/_internal/cli/base_command.py @@ -31,8 +31,10 @@ UninstallationError, ) from pipenv.patched.notpip._internal.utils.deprecation import deprecated +from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner from pipenv.patched.notpip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging -from pipenv.patched.notpip._internal.utils.misc import get_prog +from pipenv.patched.notpip._internal.utils.misc import get_prog, normalize_path +from pipenv.patched.notpip._internal.utils.temp_dir import global_tempdir_manager from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING from pipenv.patched.notpip._internal.utils.virtualenv import running_under_virtualenv @@ -92,7 +94,7 @@ def run(self, options, args): raise NotImplementedError def parse_args(self, args): - # type: (List[str]) -> Tuple + # type: (List[str]) -> Tuple[Any, Any] # factored out for testability return self.parser.parse_args(args) @@ -106,6 +108,10 @@ def main(self, args): def _main(self, args): # type: (List[str]) -> int + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + options, args = self.parse_args(args) # Set verbosity so that it can be used elsewhere. @@ -117,7 +123,10 @@ def _main(self, args): user_log_file=options.log, ) - if sys.version_info[:2] == (2, 7): + if ( + sys.version_info[:2] == (2, 7) and + not options.no_python_version_warning + ): message = ( "A future version of pip will drop support for Python 2.7. " "More details about Python 2 support in pip, can be found at " @@ -125,12 +134,23 @@ def _main(self, args): ) if platform.python_implementation() == "CPython": message = ( - "Python 2.7 will reach the end of its life on January " + "Python 2.7 reached the end of its life on January " "1st, 2020. Please upgrade your Python as Python 2.7 " - "won't be maintained after that date. " + "is no longer maintained. " ) + message deprecated(message, replacement=None, gone_in=None) + if options.skip_requirements_regex: + deprecated( + "--skip-requirements-regex is unsupported and will be removed", + replacement=( + "manage requirements/constraints files explicitly, " + "possibly generating them from metadata" + ), + gone_in="20.1", + issue=7297, + ) + # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. # This also affects isolated builds and it should. @@ -149,6 +169,19 @@ def _main(self, args): ) sys.exit(VIRTUALENV_NOT_FOUND) + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. 
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + try: status = self.run(options, args) # FIXME: all commands should return an exit status diff --git a/pipenv/patched/notpip/_internal/cli/cmdoptions.py b/pipenv/patched/notpip/_internal/cli/cmdoptions.py index ba6166d93e..9e321955b0 100644 --- a/pipenv/patched/notpip/_internal/cli/cmdoptions.py +++ b/pipenv/patched/notpip/_internal/cli/cmdoptions.py @@ -9,11 +9,11 @@ # The following comment should be removed at some point in the future. # mypy: strict-optional=False -# mypy: disallow-untyped-defs=False from __future__ import absolute_import import logging +import os import textwrap import warnings from distutils.util import strtobool @@ -39,6 +39,7 @@ def raise_option_error(parser, option, msg): + # type: (OptionParser, Option, str) -> None """ Raise an option parsing error using parser.error(). @@ -77,14 +78,15 @@ def check_install_build_global(options, check_options=None): check_options = options def getname(n): + # type: (str) -> Optional[Any] return getattr(check_options, n, None) names = ["build_options", "global_options", "install_options"] if any(map(getname, names)): control = options.format_control control.disallow_binaries() warnings.warn( - 'Disabling all use of wheels due to the use of --build-options ' - '/ --global-options / --install-options.', stacklevel=2, + 'Disabling all use of wheels due to the use of --build-option ' + '/ --global-option / --install-option.', stacklevel=2, ) @@ -128,6 +130,17 @@ def check_dist_restriction(options, check_target=False): ) +def _path_option_check(option, opt, value): + # type: (Option, str, str) -> str + return os.path.expanduser(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path",) + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["path"] = _path_option_check + + ########### # options # ########### @@ -215,10 +228,11 @@ def check_dist_restriction(options, check_target=False): ) # type: Callable[..., Option] log = partial( - Option, + PipOption, "--log", "--log-file", "--local-log", dest="log", metavar="path", + type="path", help="Path to a verbose appending log." ) # type: Callable[..., Option] @@ -289,19 +303,19 @@ def exists_action(): cert = partial( - Option, + PipOption, '--cert', dest='cert', - type='str', + type='path', metavar='path', help="Path to alternate CA bundle.", ) # type: Callable[..., Option] client_cert = partial( - Option, + PipOption, '--client-cert', dest='client_cert', - type='str', + type='path', default=None, metavar='path', help="Path to SSL client certificate, a single file containing the " @@ -322,6 +336,7 @@ def exists_action(): def extra_index_url(): + # type: () -> Option return Option( '--extra-index-url', dest='extra_index_urls', @@ -410,12 +425,21 @@ def editable(): ) +def _handle_src(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + src = partial( - Option, + PipOption, '--src', '--source', '--source-dir', '--source-directory', dest='src_dir', + type='path', metavar='dir', default=get_src_prefix(), + action='callback', + callback=_handle_src, help='Directory to check out editable projects into. ' 'The default in a virtualenv is "/src". ' 'The default for global installs is "/src".' 
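
The cmdoptions.py hunks above route every path-taking option (--log, --cert, --client-cert, --src, --cache-dir, --build, --path) through the new PipOption class, which registers a custom optparse value type named "path" that expands "~" at parse time. A self-contained sketch of the same pattern; the parser and option name below are illustrative, not pip's:

    import optparse
    import os

    def _path_check(option, opt, value):
        # Normalize the value the moment optparse parses it.
        return os.path.expanduser(value)

    class PathOption(optparse.Option):
        # Register an extra "path" value type alongside optparse's defaults.
        TYPES = optparse.Option.TYPES + ("path",)
        TYPE_CHECKER = optparse.Option.TYPE_CHECKER.copy()
        TYPE_CHECKER["path"] = _path_check

    parser = optparse.OptionParser(option_class=PathOption)
    parser.add_option("--cache-dir", dest="cache_dir", type="path")
    options, _ = parser.parse_args(["--cache-dir", "~/.cache/example"])
    print(options.cache_dir)  # "~" is already expanded

Options such as --src and --build additionally attach callbacks (_handle_src, _handle_build_dir) that apply os.path.abspath, replacing the abspath() calls the install and download commands previously performed themselves.
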
@@ -614,11 +638,12 @@ def prefer_binary(): cache_dir = partial( - Option, + PipOption, "--cache-dir", dest="cache_dir", default=USER_CACHE_DIR, metavar="dir", + type='path', help="Store the cache data in ." ) # type: Callable[..., Option] @@ -669,11 +694,22 @@ def _handle_no_cache_dir(option, opt, value, parser): help="Don't install package dependencies.", ) # type: Callable[..., Option] + +def _handle_build_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + if value: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + build_dir = partial( - Option, + PipOption, '-b', '--build', '--build-dir', '--build-directory', dest='build_dir', + type='path', metavar='dir', + action='callback', + callback=_handle_build_dir, help='Directory to unpack packages into and build in. Note that ' 'an initial build still takes place in a temporary directory. ' 'The location of temporary directories can be controlled by setting ' @@ -851,9 +887,10 @@ def _handle_merge_hash(option, opt_str, value, parser): list_path = partial( - Option, + PipOption, '--path', dest='path', + type='path', action='append', help='Restrict to the specified installation path for listing ' 'packages (can be used multiple times).' @@ -868,6 +905,16 @@ def check_list_path_option(options): ) +no_python_version_warning = partial( + Option, + '--no-python-version-warning', + dest='no_python_version_warning', + action='store_true', + default=False, + help='Silence deprecation warnings for upcoming unsupported Pythons.', +) # type: Callable[..., Option] + + ########## # groups # ########## @@ -895,6 +942,7 @@ def check_list_path_option(options): no_cache, disable_pip_version_check, no_color, + no_python_version_warning, ] } # type: Dict[str, Any] diff --git a/pipenv/patched/notpip/_internal/cli/command_context.py b/pipenv/patched/notpip/_internal/cli/command_context.py index d529fb6712..1ca4649461 100644 --- a/pipenv/patched/notpip/_internal/cli/command_context.py +++ b/pipenv/patched/notpip/_internal/cli/command_context.py @@ -1,19 +1,25 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - from contextlib import contextmanager from pipenv.patched.notpip._vendor.contextlib2 import ExitStack +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator, ContextManager, TypeVar + + _T = TypeVar('_T', covariant=True) + class CommandContextMixIn(object): def __init__(self): + # type: () -> None super(CommandContextMixIn, self).__init__() self._in_main_context = False self._main_context = ExitStack() @contextmanager def main_context(self): + # type: () -> Iterator[None] assert not self._in_main_context self._in_main_context = True @@ -24,6 +30,7 @@ def main_context(self): self._in_main_context = False def enter_context(self, context_provider): + # type: (ContextManager[_T]) -> _T assert self._in_main_context return self._main_context.enter_context(context_provider) diff --git a/pipenv/patched/notpip/_internal/cli/main.py b/pipenv/patched/notpip/_internal/cli/main.py new file mode 100644 index 0000000000..00ed7b1239 --- /dev/null +++ b/pipenv/patched/notpip/_internal/cli/main.py @@ -0,0 +1,75 @@ +"""Primary application entrypoint. 
+""" +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +from pipenv.patched.notpip._internal.cli.autocompletion import autocomplete +from pipenv.patched.notpip._internal.cli.main_parser import parse_command +from pipenv.patched.notpip._internal.commands import create_command +from pipenv.patched.notpip._internal.exceptions import PipError +from pipenv.patched.notpip._internal.utils import deprecation +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. + +def main(args=None): + # type: (Optional[List[str]]) -> int + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write("ERROR: %s" % exc) + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, '') + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/pipenv/patched/notpip/_internal/cli/req_command.py b/pipenv/patched/notpip/_internal/cli/req_command.py index ff76aeb876..0a5095a87c 100644 --- a/pipenv/patched/notpip/_internal/cli/req_command.py +++ b/pipenv/patched/notpip/_internal/cli/req_command.py @@ -5,18 +5,17 @@ PackageFinder machinery and all its vendored dependencies, etc. """ -# The following comment should be removed at some point in the future. 
-# mypy: disallow-untyped-defs=False - +import logging import os from functools import partial from pipenv.patched.notpip._internal.cli.base_command import Command from pipenv.patched.notpip._internal.cli.command_context import CommandContextMixIn from pipenv.patched.notpip._internal.exceptions import CommandError -from pipenv.patched.notpip._internal.index import PackageFinder +from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._internal.legacy_resolve import Resolver from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences +from pipenv.patched.notpip._internal.network.download import Downloader from pipenv.patched.notpip._internal.network.session import PipSession from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer from pipenv.patched.notpip._internal.req.constructors import ( @@ -29,7 +28,6 @@ make_link_collector, pip_self_version_check, ) -from pipenv.patched.notpip._internal.utils.misc import normalize_path from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: @@ -41,6 +39,8 @@ from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory +logger = logging.getLogger(__name__) + class SessionCommandMixin(CommandContextMixIn): @@ -48,11 +48,13 @@ class SessionCommandMixin(CommandContextMixIn): A class mixin for command classes needing _build_session(). """ def __init__(self): + # type: () -> None super(SessionCommandMixin, self).__init__() self._session = None # Optional[PipSession] @classmethod def _get_index_urls(cls, options): + # type: (Values) -> Optional[List[str]] """Return a list of index urls from user-provided options.""" index_urls = [] if not getattr(options, "no_index", False): @@ -70,13 +72,18 @@ def get_default_session(self, options): """Get a default-managed session.""" if self._session is None: self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None return self._session def _build_session(self, options, retries=None, timeout=None): # type: (Values, Optional[int], Optional[int]) -> PipSession + assert not options.cache_dir or os.path.isabs(options.cache_dir) session = PipSession( cache=( - normalize_path(os.path.join(options.cache_dir, "http")) + os.path.join(options.cache_dir, "http") if options.cache_dir else None ), retries=retries if retries is not None else options.retries, @@ -149,6 +156,9 @@ def make_requirement_preparer( temp_build_dir, # type: TempDirectory options, # type: Values req_tracker, # type: RequirementTracker + session, # type: PipSession + finder, # type: PackageFinder + use_user_site, # type: bool download_dir=None, # type: str wheel_download_dir=None, # type: str ): @@ -156,22 +166,27 @@ def make_requirement_preparer( """ Create a RequirementPreparer instance for the given parameters. 
""" + downloader = Downloader(session, progress_bar=options.progress_bar) + temp_build_dir_path = temp_build_dir.path assert temp_build_dir_path is not None + return RequirementPreparer( build_dir=temp_build_dir_path, src_dir=options.src_dir, download_dir=download_dir, wheel_download_dir=wheel_download_dir, - progress_bar=options.progress_bar, build_isolation=options.build_isolation, req_tracker=req_tracker, + downloader=downloader, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, ) @staticmethod def make_resolver( preparer, # type: RequirementPreparer - session, # type: PipSession finder, # type: PackageFinder options, # type: Values wheel_cache=None, # type: Optional[WheelCache] @@ -195,7 +210,6 @@ def make_resolver( ) return Resolver( preparer=preparer, - session=session, finder=finder, make_install_req=make_install_req, use_user_site=use_user_site, @@ -204,7 +218,7 @@ def make_resolver( ignore_requires_python=ignore_requires_python, force_reinstall=force_reinstall, upgrade_strategy=upgrade_strategy, - py_version_info=py_version_info + py_version_info=py_version_info, ) def populate_requirement_set( @@ -220,9 +234,6 @@ def populate_requirement_set( """ Marshal cmd line args into a requirement set. """ - # NOTE: As a side-effect, options.require_hashes and - # requirement_set.require_hashes may be updated - for filename in options.constraints: for req_to_add in parse_requirements( filename, @@ -250,6 +261,7 @@ def populate_requirement_set( req_to_add.is_direct = True requirement_set.add_requirement(req_to_add) + # NOTE: options.require_hashes may be set if --require-hashes is True for filename in options.requirements: for req_to_add in parse_requirements( filename, @@ -258,9 +270,14 @@ def populate_requirement_set( use_pep517=options.use_pep517): req_to_add.is_direct = True requirement_set.add_requirement(req_to_add) - # If --require-hashes was a line in a requirements file, tell - # RequirementSet about it: - requirement_set.require_hashes = options.require_hashes + + # If any requirement has hash options, enable hash checking. + requirements = ( + requirement_set.unnamed_requirements + + list(requirement_set.requirements.values()) + ) + if any(req.has_hash_options for req in requirements): + options.require_hashes = True if not (args or options.editables or options.requirements): opts = {'name': self.name} @@ -274,6 +291,18 @@ def populate_requirement_set( 'You must give at least one requirement to %(name)s ' '(see "pip help %(name)s")' % opts) + @staticmethod + def trace_basic_info(finder): + # type: (PackageFinder) -> None + """ + Trace basic information about the provided objects. 
+ """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + def _build_package_finder( self, options, # type: Values diff --git a/pipenv/patched/notpip/_internal/commands/debug.py b/pipenv/patched/notpip/_internal/commands/debug.py index f83e757344..1f2bb94825 100644 --- a/pipenv/patched/notpip/_internal/commands/debug.py +++ b/pipenv/patched/notpip/_internal/commands/debug.py @@ -5,8 +5,11 @@ import locale import logging +import os import sys +from pipenv.patched.notpip._vendor.certifi import where + from pipenv.patched.notpip._internal.cli import cmdoptions from pipenv.patched.notpip._internal.cli.base_command import Command from pipenv.patched.notpip._internal.cli.cmdoptions import make_target_python @@ -14,17 +17,16 @@ from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.misc import get_pip_version from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.wheel import format_tag if MYPY_CHECK_RUNNING: - from typing import Any, List + from typing import Any, List, Optional from optparse import Values logger = logging.getLogger(__name__) def show_value(name, value): - # type: (str, str) -> None + # type: (str, Optional[str]) -> None logger.info('{}: {}'.format(name, value)) @@ -65,7 +67,7 @@ def show_tags(options): with indent_log(): for tag in tags: - logger.info(format_tag(tag)) + logger.info(str(tag)) if tags_limited: msg = ( @@ -75,6 +77,25 @@ def show_tags(options): logger.info(msg) +def ca_bundle_info(config): + levels = set() + for key, value in config.items(): + levels.add(key.split('.')[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ['install', 'wheel', 'download'] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return 'global' + + levels.remove('global') + return ", ".join(levels) + + class DebugCommand(Command): """ Display debug information. 
@@ -90,6 +111,7 @@ def __init__(self, *args, **kw): cmd_opts = self.cmd_opts cmdoptions.add_target_python_options(cmd_opts) self.parser.insert_option_group(0, cmd_opts) + self.parser.config.load() def run(self, options, args): # type: (Values, List[Any]) -> int @@ -110,6 +132,11 @@ def run(self, options, args): show_value('sys.platform', sys.platform) show_sys_implementation() + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE')) + show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE')) + show_value("pip._vendor.certifi.where()", where()) + show_tags(options) return SUCCESS diff --git a/pipenv/patched/notpip/_internal/commands/download.py b/pipenv/patched/notpip/_internal/commands/download.py index a56f0983cd..62072e4fe0 100644 --- a/pipenv/patched/notpip/_internal/commands/download.py +++ b/pipenv/patched/notpip/_internal/commands/download.py @@ -10,8 +10,7 @@ from pipenv.patched.notpip._internal.cli.cmdoptions import make_target_python from pipenv.patched.notpip._internal.cli.req_command import RequirementCommand from pipenv.patched.notpip._internal.req import RequirementSet -from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker -from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner +from pipenv.patched.notpip._internal.req.req_tracker import get_requirement_tracker from pipenv.patched.notpip._internal.utils.misc import ensure_dir, normalize_path, write_output from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory @@ -86,7 +85,6 @@ def run(self, options, args): cmdoptions.check_dist_restriction(options) - options.src_dir = os.path.abspath(options.src_dir) options.download_dir = normalize_path(options.download_dir) ensure_dir(options.download_dir) @@ -100,24 +98,12 @@ def run(self, options, args): target_python=target_python, ) build_delete = (not (options.no_clean or options.build_dir)) - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. 
If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - with RequirementTracker() as req_tracker, TempDirectory( + with get_requirement_tracker() as req_tracker, TempDirectory( options.build_dir, delete=build_delete, kind="download" ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) + requirement_set = RequirementSet() self.populate_requirement_set( requirement_set, args, @@ -131,16 +117,21 @@ def run(self, options, args): temp_build_dir=directory, options=options, req_tracker=req_tracker, + session=session, + finder=finder, download_dir=options.download_dir, + use_user_site=False, ) resolver = self.make_resolver( preparer=preparer, finder=finder, - session=session, options=options, py_version_info=options.python_version, ) + + self.trace_basic_info(finder) + resolver.resolve(requirement_set) downloaded = ' '.join([ diff --git a/pipenv/patched/notpip/_internal/commands/install.py b/pipenv/patched/notpip/_internal/commands/install.py index 76ae4d6d83..794fd45929 100644 --- a/pipenv/patched/notpip/_internal/commands/install.py +++ b/pipenv/patched/notpip/_internal/commands/install.py @@ -12,6 +12,7 @@ import operator import os import shutil +import site from optparse import SUPPRESS_HELP from pipenv.patched.notpip._vendor import pkg_resources @@ -30,8 +31,10 @@ from pipenv.patched.notpip._internal.locations import distutils_scheme from pipenv.patched.notpip._internal.operations.check import check_install_conflicts from pipenv.patched.notpip._internal.req import RequirementSet, install_given_reqs -from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker -from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner +from pipenv.patched.notpip._internal.req.req_tracker import get_requirement_tracker +from pipenv.patched.notpip._internal.utils.deprecation import deprecated +from pipenv.patched.notpip._internal.utils.distutils_args import parse_distutils_args +from pipenv.patched.notpip._internal.utils.filesystem import test_writable_dir from pipenv.patched.notpip._internal.utils.misc import ( ensure_dir, get_installed_version, @@ -41,62 +44,20 @@ from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING from pipenv.patched.notpip._internal.utils.virtualenv import virtualenv_no_global -from pipenv.patched.notpip._internal.wheel import WheelBuilder +from pipenv.patched.notpip._internal.wheel_builder import build, should_build_for_install_command if MYPY_CHECK_RUNNING: from optparse import Values - from typing import Any, List, Optional + from typing import Any, Iterable, List, Optional from pipenv.patched.notpip._internal.models.format_control import FormatControl from pipenv.patched.notpip._internal.req.req_install import InstallRequirement - from pipenv.patched.notpip._internal.wheel import BinaryAllowedPredicate + from pipenv.patched.notpip._internal.wheel_builder import BinaryAllowedPredicate logger = logging.getLogger(__name__) -def is_wheel_installed(): - """ - Return whether the wheel package is installed. - """ - try: - import wheel # noqa: F401 - except ImportError: - return False - - return True - - -def build_wheels( - builder, # type: WheelBuilder - pep517_requirements, # type: List[InstallRequirement] - legacy_requirements, # type: List[InstallRequirement] -): - # type: (...) 
-> List[InstallRequirement] - """ - Build wheels for requirements, depending on whether wheel is installed. - """ - # We don't build wheels for legacy requirements if wheel is not installed. - should_build_legacy = is_wheel_installed() - - # Always build PEP 517 requirements - build_failures = builder.build( - pep517_requirements, - should_unpack=True, - ) - - if should_build_legacy: - # We don't care about failures building legacy - # requirements, as we'll fall through to a direct - # install for those. - builder.build( - legacy_requirements, - should_unpack=True, - ) - - return build_failures - - def get_check_binary_allowed(format_control): # type: (FormatControl) -> BinaryAllowedPredicate def check_binary_allowed(req): @@ -285,26 +246,17 @@ def run(self, options, args): if options.upgrade: upgrade_strategy = options.upgrade_strategy - if options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) - cmdoptions.check_dist_restriction(options, check_target=True) - options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] - if options.use_user_site: - if options.prefix_path: - raise CommandError( - "Can not combine '--user' and '--prefix' as they imply " - "different installation locations" - ) - if virtualenv_no_global(): - raise InstallationError( - "Can not perform a '--user' install. User site-packages " - "are not visible in this virtualenv." - ) - install_options.append('--user') - install_options.append('--prefix=') + + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) target_temp_dir = None # type: Optional[TempDirectory] target_temp_dir_path = None # type: Optional[str] @@ -321,7 +273,6 @@ def run(self, options, args): # Create a target directory for using with the target option target_temp_dir = TempDirectory(kind="target") target_temp_dir_path = target_temp_dir.path - install_options.append('--home=' + target_temp_dir_path) global_options = options.global_options or [] @@ -337,22 +288,10 @@ def run(self, options, args): build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. 
If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None - - with RequirementTracker() as req_tracker, TempDirectory( + with get_requirement_tracker() as req_tracker, TempDirectory( options.build_dir, delete=build_delete, kind="install" ) as directory: requirement_set = RequirementSet( - require_hashes=options.require_hashes, check_supported_wheels=not options.target_dir, ) @@ -361,15 +300,22 @@ def run(self, options, args): requirement_set, args, options, finder, session, wheel_cache ) + + warn_deprecated_install_options( + requirement_set, options.install_options + ) + preparer = self.make_requirement_preparer( temp_build_dir=directory, options=options, req_tracker=req_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, ) resolver = self.make_resolver( preparer=preparer, finder=finder, - session=session, options=options, wheel_cache=wheel_cache, use_user_site=options.use_user_site, @@ -379,6 +325,9 @@ def run(self, options, args): upgrade_strategy=upgrade_strategy, use_pep517=options.use_pep517, ) + + self.trace_basic_info(finder) + resolver.resolve(requirement_set) try: @@ -396,34 +345,34 @@ def run(self, options, args): check_binary_allowed = get_check_binary_allowed( finder.format_control ) - # Consider legacy and PEP517-using requirements separately - legacy_requirements = [] - pep517_requirements = [] - for req in requirement_set.requirements.values(): - if req.use_pep517: - pep517_requirements.append(req) - else: - legacy_requirements.append(req) - - wheel_builder = WheelBuilder( - preparer, wheel_cache, - build_options=[], global_options=[], - check_binary_allowed=check_binary_allowed, - ) - build_failures = build_wheels( - builder=wheel_builder, - pep517_requirements=pep517_requirements, - legacy_requirements=legacy_requirements, + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_install_command( + r, check_binary_allowed + ) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=[], + global_options=[], ) # If we're using PEP 517, we cannot do a direct install # so we fail here. - if build_failures: + # We don't care about failures building legacy + # requirements, as we'll fall through to a direct + # install for those. 
+ pep517_build_failures = [ + r for r in build_failures if r.use_pep517 + ] + if pep517_build_failures: raise InstallationError( "Could not build wheels for {} which use" " PEP 517 and cannot be installed directly".format( - ", ".join(r.name for r in build_failures))) + ", ".join(r.name for r in pep517_build_failures))) to_install = resolver.get_installation_order( requirement_set @@ -464,13 +413,13 @@ def run(self, options, args): ) working_set = pkg_resources.WorkingSet(lib_locations) - reqs = sorted(installed, key=operator.attrgetter('name')) + installed.sort(key=operator.attrgetter('name')) items = [] - for req in reqs: - item = req.name + for result in installed: + item = result.name try: installed_version = get_installed_version( - req.name, working_set=working_set + result.name, working_set=working_set ) if installed_version: item += '-' + installed_version @@ -595,6 +544,127 @@ def get_lib_location_guesses(*args, **kwargs): return [scheme['purelib'], scheme['platlib']] +def site_packages_writable(**kwargs): + return all( + test_writable_dir(d) for d in set(get_lib_location_guesses(**kwargs)) + ) + + +def decide_user_install( + use_user_site, # type: Optional[bool] + prefix_path=None, # type: Optional[str] + target_dir=None, # type: Optional[str] + root_path=None, # type: Optional[str] + isolated_mode=False, # type: bool +): + # type: (...) -> bool + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. + if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info("Defaulting to user installation because normal site-packages " + "is not writeable") + return True + + +def warn_deprecated_install_options(requirement_set, options): + # type: (RequirementSet, Optional[List[str]]) -> None + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. 
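+
+    Illustrative example (editor's sketch, not upstream pip text)::
+
+        pip install somepkg --install-option="--prefix=/opt/somepkg"
+
+    would be reported here as "['--prefix'] from command line", assuming
+    parse_distutils_args() recognizes --prefix as a location-changing
+    option; non-location install options pass through without a warning.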
+ """ + def format_options(option_names): + # type: (Iterable[str]) -> List[str] + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + requirements = ( + requirement_set.unnamed_requirements + + list(requirement_set.requirements.values()) + ) + + offenders = [] + + for requirement in requirements: + install_options = requirement.options.get("install_options", []) + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format( + format_options(location_options.keys()) + ) + ) + + if not offenders: + return + + deprecated( + reason=( + "Location-changing options found in --install-option: {}. " + "This configuration may cause unexpected behavior and is " + "unsupported.".format( + "; ".join(offenders) + ) + ), + replacement=( + "using pip-level options like --user, --prefix, --root, and " + "--target" + ), + gone_in="20.2", + issue=7309, + ) + + def create_env_error_message(error, show_traceback, using_user_site): """Format an error message for an EnvironmentError diff --git a/pipenv/patched/notpip/_internal/commands/list.py b/pipenv/patched/notpip/_internal/commands/list.py index b61b4c8c05..bc58a324a2 100644 --- a/pipenv/patched/notpip/_internal/commands/list.py +++ b/pipenv/patched/notpip/_internal/commands/list.py @@ -12,7 +12,7 @@ from pipenv.patched.notpip._internal.cli import cmdoptions from pipenv.patched.notpip._internal.cli.req_command import IndexGroupCommand from pipenv.patched.notpip._internal.exceptions import CommandError -from pipenv.patched.notpip._internal.index import PackageFinder +from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences from pipenv.patched.notpip._internal.self_outdated_check import make_link_collector from pipenv.patched.notpip._internal.utils.misc import ( diff --git a/pipenv/patched/notpip/_internal/commands/wheel.py b/pipenv/patched/notpip/_internal/commands/wheel.py index 8d963b4ace..b8a8701a4d 100644 --- a/pipenv/patched/notpip/_internal/commands/wheel.py +++ b/pipenv/patched/notpip/_internal/commands/wheel.py @@ -7,16 +7,18 @@ import logging import os +import shutil from pipenv.patched.notpip._internal.cache import WheelCache from pipenv.patched.notpip._internal.cli import cmdoptions from pipenv.patched.notpip._internal.cli.req_command import RequirementCommand from pipenv.patched.notpip._internal.exceptions import CommandError, PreviousBuildDirError from pipenv.patched.notpip._internal.req import RequirementSet -from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker +from pipenv.patched.notpip._internal.req.req_tracker import get_requirement_tracker +from pipenv.patched.notpip._internal.utils.misc import ensure_dir, normalize_path from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.wheel import WheelBuilder +from pipenv.patched.notpip._internal.wheel_builder import build, should_build_for_wheel_command if MYPY_CHECK_RUNNING: from optparse import Values @@ -114,24 +116,20 @@ def run(self, options, args): # type: (Values, List[Any]) -> None cmdoptions.check_install_build_global(options) - if 
options.build_dir: - options.build_dir = os.path.abspath(options.build_dir) - - options.src_dir = os.path.abspath(options.src_dir) - session = self.get_default_session(options) finder = self._build_package_finder(options, session) build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) - with RequirementTracker() as req_tracker, TempDirectory( + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) + + with get_requirement_tracker() as req_tracker, TempDirectory( options.build_dir, delete=build_delete, kind="wheel" ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) + requirement_set = RequirementSet() try: self.populate_requirement_set( @@ -143,30 +141,49 @@ def run(self, options, args): temp_build_dir=directory, options=options, req_tracker=req_tracker, + session=session, + finder=finder, wheel_download_dir=options.wheel_dir, + use_user_site=False, ) resolver = self.make_resolver( preparer=preparer, finder=finder, - session=session, options=options, wheel_cache=wheel_cache, ignore_requires_python=options.ignore_requires_python, use_pep517=options.use_pep517, ) + + self.trace_basic_info(finder) + resolver.resolve(requirement_set) + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_wheel_command(r) + ] + # build wheels - wb = WheelBuilder( - preparer, wheel_cache, + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, build_options=options.build_options or [], global_options=options.global_options or [], - no_clean=options.no_clean, - ) - build_failures = wb.build( - requirement_set.requirements.values(), ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + build_failures.append(req) if len(build_failures) != 0: raise CommandError( "Failed to build one or more wheels" diff --git a/pipenv/patched/notpip/_internal/configuration.py b/pipenv/patched/notpip/_internal/configuration.py index 101934eb9e..e0625e5cd8 100644 --- a/pipenv/patched/notpip/_internal/configuration.py +++ b/pipenv/patched/notpip/_internal/configuration.py @@ -13,7 +13,6 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False import locale import logging @@ -78,6 +77,7 @@ def _disassemble_key(name): def get_configuration_files(): + # type: () -> Dict[Kind, List[str]] global_config_files = [ os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs('pip') diff --git a/pipenv/patched/notpip/_internal/distributions/__init__.py b/pipenv/patched/notpip/_internal/distributions/__init__.py index 9eb3d7d6ec..be559b074e 100644 --- a/pipenv/patched/notpip/_internal/distributions/__init__.py +++ b/pipenv/patched/notpip/_internal/distributions/__init__.py @@ -1,4 +1,4 @@ -from pipenv.patched.notpip._internal.distributions.source.legacy import SourceDistribution +from pipenv.patched.notpip._internal.distributions.sdist import SourceDistribution from pipenv.patched.notpip._internal.distributions.wheel import WheelDistribution from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING diff --git a/pipenv/patched/notpip/_internal/distributions/base.py b/pipenv/patched/notpip/_internal/distributions/base.py index b479ff831e..ab4e4ea2dc 100644 --- a/pipenv/patched/notpip/_internal/distributions/base.py +++ b/pipenv/patched/notpip/_internal/distributions/base.py @@ -1,10 +1,16 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - import abc from pipenv.patched.notpip._vendor.six import add_metaclass +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pipenv.patched.notpip._vendor.pkg_resources import Distribution + from pipenv.patched.notpip._internal.req import InstallRequirement + from pipenv.patched.notpip._internal.index.package_finder import PackageFinder + @add_metaclass(abc.ABCMeta) class AbstractDistribution(object): @@ -24,13 +30,16 @@ class AbstractDistribution(object): """ def __init__(self, req): + # type: (InstallRequirement) -> None super(AbstractDistribution, self).__init__() self.req = req @abc.abstractmethod def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] raise NotImplementedError() @abc.abstractmethod def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None raise NotImplementedError() diff --git a/pipenv/patched/notpip/_internal/distributions/installed.py b/pipenv/patched/notpip/_internal/distributions/installed.py index 78f29d52ba..2bfda5e0ac 100644 --- a/pipenv/patched/notpip/_internal/distributions/installed.py +++ b/pipenv/patched/notpip/_internal/distributions/installed.py @@ -1,7 +1,11 @@ -# The following comment should be removed at some point in the future. 
-# mypy: disallow-untyped-defs=False
-
 from pipenv.patched.notpip._internal.distributions.base import AbstractDistribution
+from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import Optional
+
+    from pipenv.patched.notpip._vendor.pkg_resources import Distribution
+    from pipenv.patched.notpip._internal.index.package_finder import PackageFinder
 
 
 class InstalledDistribution(AbstractDistribution):
@@ -12,7 +16,9 @@ class InstalledDistribution(AbstractDistribution):
     """
 
     def get_pkg_resources_distribution(self):
+        # type: () -> Optional[Distribution]
         return self.req.satisfied_by
 
     def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
         pass
diff --git a/pipenv/patched/notpip/_internal/distributions/sdist.py b/pipenv/patched/notpip/_internal/distributions/sdist.py
new file mode 100644
index 0000000000..b150795ca0
--- /dev/null
+++ b/pipenv/patched/notpip/_internal/distributions/sdist.py
@@ -0,0 +1,104 @@
+import logging
+
+from pipenv.patched.notpip._internal.build_env import BuildEnvironment
+from pipenv.patched.notpip._internal.distributions.base import AbstractDistribution
+from pipenv.patched.notpip._internal.exceptions import InstallationError
+from pipenv.patched.notpip._internal.utils.subprocess import runner_with_spinner_message
+from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import Set, Tuple
+
+    from pipenv.patched.notpip._vendor.pkg_resources import Distribution
+    from pipenv.patched.notpip._internal.index.package_finder import PackageFinder
+
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+    """Represents a source distribution.
+
+    The preparation step for these needs metadata for the packages to be
+    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+    """
+
+    def get_pkg_resources_distribution(self):
+        # type: () -> Distribution
+        return self.req.get_dist()
+
+    def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
+        # Load pyproject.toml, to determine whether PEP 517 is to be used
+        self.req.load_pyproject_toml()
+
+        # Set up the build isolation, if this requirement should be isolated
+        should_isolate = self.req.use_pep517 and build_isolation
+        if should_isolate:
+            self._setup_isolation(finder)
+
+        self.req.prepare_metadata()
+
+    def _setup_isolation(self, finder):
+        # type: (PackageFinder) -> None
+        def _raise_conflicts(conflicting_with, conflicting_reqs):
+            # type: (str, Set[Tuple[str, str]]) -> None
+            format_string = (
+                "Some build dependencies for {requirement} "
+                "conflict with {conflicting_with}: {description}."
+            )
+            error_message = format_string.format(
+                requirement=self.req,
+                conflicting_with=conflicting_with,
+                description=', '.join(
+                    '{} is incompatible with {}'.format(installed, wanted)
+                    for installed, wanted in sorted(conflicting_reqs)
+                )
+            )
+            raise InstallationError(error_message)
+
+        # Isolate in a BuildEnvironment and install the build-time
+        # requirements.
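+        # (Editor's note, illustrative: this is the first of two passes.
+        # The PEP 518 build-system.requires are installed into an
+        # 'overlay' prefix here; any extra requirements reported by the
+        # backend's get_requires_for_build_wheel() are installed in the
+        # second, 'normal' pass further down.)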
+ pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, 'overlay', + "Installing build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + _raise_conflicts("PEP 517/518 supported requirements", + conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))) + ) + # Install any extra build dependencies that the backend requests. + # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build wheel" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + reqs = backend.get_requires_for_build_wheel() + + conflicting, missing = self.req.build_env.check_requirements(reqs) + if conflicting: + _raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, 'normal', + "Installing backend dependencies" + ) diff --git a/pipenv/patched/notpip/_internal/distributions/wheel.py b/pipenv/patched/notpip/_internal/distributions/wheel.py index 23e73ee710..06a59c47c4 100644 --- a/pipenv/patched/notpip/_internal/distributions/wheel.py +++ b/pipenv/patched/notpip/_internal/distributions/wheel.py @@ -1,9 +1,12 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - -from pipenv.patched.notpip._vendor import pkg_resources +from zipfile import ZipFile from pipenv.patched.notpip._internal.distributions.base import AbstractDistribution +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +if MYPY_CHECK_RUNNING: + from pipenv.patched.notpip._vendor.pkg_resources import Distribution + from pipenv.patched.notpip._internal.index.package_finder import PackageFinder class WheelDistribution(AbstractDistribution): @@ -13,8 +16,21 @@ class WheelDistribution(AbstractDistribution): """ def get_pkg_resources_distribution(self): - return list(pkg_resources.find_distributions( - self.req.source_dir))[0] + # type: () -> Distribution + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + # Set as part of preparation during download. + assert self.req.local_file_path + # Wheels are never unnamed. 
+ assert self.req.name + + with ZipFile(self.req.local_file_path, allowZip64=True) as z: + return pkg_resources_distribution_for_wheel( + z, self.req.name, self.req.local_file_path + ) def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None pass diff --git a/pipenv/patched/notpip/_internal/index/__init__.py b/pipenv/patched/notpip/_internal/index/__init__.py new file mode 100644 index 0000000000..7a17b7b3b6 --- /dev/null +++ b/pipenv/patched/notpip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/pipenv/patched/notpip/_internal/index/collector.py b/pipenv/patched/notpip/_internal/index/collector.py new file mode 100644 index 0000000000..25893cb923 --- /dev/null +++ b/pipenv/patched/notpip/_internal/index/collector.py @@ -0,0 +1,544 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_links(). +""" + +import cgi +import itertools +import logging +import mimetypes +import os +from collections import OrderedDict + +from pipenv.patched.notpip._vendor import html5lib, requests +from pipenv.patched.notpip._vendor.distlib.compat import unescape +from pipenv.patched.notpip._vendor.requests.exceptions import HTTPError, RetryError, SSLError +from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse +from pipenv.patched.notpip._vendor.six.moves.urllib import request as urllib_request + +from pipenv.patched.notpip._internal.models.link import Link +from pipenv.patched.notpip._internal.utils.filetypes import ARCHIVE_EXTENSIONS +from pipenv.patched.notpip._internal.utils.misc import redact_auth_from_url +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.urls import path_to_url, url_to_path +from pipenv.patched.notpip._internal.vcs import is_url, vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Callable, Iterable, List, MutableMapping, Optional, Sequence, Tuple, + Union, + ) + import xml.etree.ElementTree + + from pipenv.patched.notpip._vendor.requests import Response + + from pipenv.patched.notpip._internal.models.search_scope import SearchScope + from pipenv.patched.notpip._internal.network.session import PipSession + + HTMLElement = xml.etree.ElementTree.Element + ResponseHeaders = MutableMapping[str, str] + + +logger = logging.getLogger(__name__) + + +def _match_vcs_scheme(url): + # type: (str) -> Optional[str] + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + return scheme + return None + + +def _is_url_like_archive(url): + # type: (str) -> bool + """Return whether the URL looks like an archive. + """ + filename = Link(url).filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + return True + return False + + +class _NotHTML(Exception): + def __init__(self, content_type, request_desc): + # type: (str, str) -> None + super(_NotHTML, self).__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response): + # type: (Response) -> None + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. 
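+
+    Illustrative example (editor's addition): a response carrying
+    ``Content-Type: application/json`` raises
+    ``_NotHTML("application/json", <request method>)``, while
+    ``Content-Type: text/html; charset=utf-8`` passes, since only the
+    prefix of the header value is checked, case-insensitively.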
+    """
+    content_type = response.headers.get("Content-Type", "")
+    if not content_type.lower().startswith("text/html"):
+        raise _NotHTML(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+    pass
+
+
+def _ensure_html_response(url, session):
+    # type: (str, PipSession) -> None
+    """Send a HEAD request to the URL, and ensure the response contains HTML.
+
+    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+    `_NotHTML` if the content type is not text/html.
+    """
+    scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
+    if scheme not in {'http', 'https'}:
+        raise _NotHTTP()
+
+    resp = session.head(url, allow_redirects=True)
+    resp.raise_for_status()
+
+    _ensure_html_header(resp)
+
+
+def _get_html_response(url, session):
+    # type: (str, PipSession) -> Response
+    """Access an HTML page with GET, and return the response.
+
+    This consists of three parts:
+
+    1. If the URL looks suspiciously like an archive, send a HEAD first to
+       check the Content-Type is HTML, to avoid downloading a large file.
+       Raise `_NotHTTP` if the content type cannot be determined, or
+       `_NotHTML` if it is not HTML.
+    2. Actually perform the request. Raise HTTP exceptions on network failures.
+    3. Check the Content-Type header to make sure we got HTML, and raise
+       `_NotHTML` otherwise.
+    """
+    if _is_url_like_archive(url):
+        _ensure_html_response(url, session=session)
+
+    logger.debug('Getting page %s', redact_auth_from_url(url))
+
+    resp = session.get(
+        url,
+        headers={
+            "Accept": "text/html",
+            # We don't want to blindly return cached data for
+            # /simple/, because authors generally expect that
+            # twine upload && pip install will function, but if
+            # they've done a pip install in the last ~10 minutes
+            # it won't. Thus by setting this to zero we will not
+            # blindly use any cached data, however the benefit of
+            # using max-age=0 instead of no-cache, is that we will
+            # still support conditional requests, so we will still
+            # minimize traffic sent in cases where the page hasn't
+            # changed at all, we will just always incur the round
+            # trip for the conditional GET now instead of only
+            # once per 10 minutes.
+            # For more information, please see pypa/pip#5670.
+            "Cache-Control": "max-age=0",
+        },
+    )
+    resp.raise_for_status()
+
+    # The check for archives above only works if the url ends with
+    # something that looks like an archive. However that is not a
+    # requirement of a URL. Unless we issue a HEAD request on every
+    # url we cannot know ahead of time for sure if something is HTML
+    # or not. However we can check after we've downloaded it.
+    _ensure_html_header(resp)
+
+    return resp
+
+
+def _get_encoding_from_headers(headers):
+    # type: (ResponseHeaders) -> Optional[str]
+    """Determine if we have any encoding information in our headers.
+    """
+    if headers and "Content-Type" in headers:
+        content_type, params = cgi.parse_header(headers["Content-Type"])
+        if "charset" in params:
+            return params['charset']
+    return None
+
+
+def _determine_base_url(document, page_url):
+    # type: (HTMLElement, str) -> str
+    """Determine the HTML document's base URL.
+
+    This looks for a ``<base>`` tag in the HTML document. If present, its href
+    attribute denotes the base URL of anchor tags in the document. If there is
+    no such tag (or if it does not have a valid href attribute), the HTML
+    file's URL is used as the base URL.
+
+    :param document: An HTML document representation. The current
+        implementation expects the result of ``html5lib.parse()``.
+ :param page_url: The URL of the HTML document. + """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_link(url): + # type: (str) -> str + """Makes sure a link is fully encoded. That is, if a ' ' shows up in + the link, it will be rewritten to %20 (while not over-quoting + % or other characters).""" + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. Note that the + # `netloc` can be empty and the URI will then refer to a local + # filesystem path. + result = urllib_parse.urlparse(url) + # In both cases below we unquote prior to quoting to make sure + # nothing is double quoted. + if result.netloc == "": + # On Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + path = urllib_request.pathname2url( + urllib_request.url2pathname(result.path)) + else: + # In addition to the `/` character we protect `@` so that + # revision strings in VCS URLs are properly parsed. + path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@") + return urllib_parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + anchor, # type: HTMLElement + page_url, # type: str + base_url, # type: str +): + # type: (...) -> Optional[Link] + """ + Convert an anchor element in a simple repository page to a Link. + """ + href = anchor.get("href") + if not href: + return None + + url = _clean_link(urllib_parse.urljoin(base_url, href)) + pyrequire = anchor.get('data-requires-python') + pyrequire = unescape(pyrequire) if pyrequire else None + + yanked_reason = anchor.get('data-yanked') + if yanked_reason: + # This is a unicode string in Python 2 (and 3). + yanked_reason = unescape(yanked_reason) + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +def parse_links(page): + # type: (HTMLPage) -> Iterable[Link] + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__( + self, + content, # type: bytes + encoding, # type: Optional[str] + url, # type: str + ): + # type: (...) -> None + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + """ + self.content = content + self.encoding = encoding + self.url = url + + def __str__(self): + # type: () -> str + return redact_auth_from_url(self.url) + + +def _handle_get_page_fail( + link, # type: Link + reason, # type: Union[str, Exception] + meth=None # type: Optional[Callable[..., None]] +): + # type: (...) 
-> None + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_html_page(response): + # type: (Response) -> HTMLPage + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage(response.content, encoding=encoding, url=response.url) + + +def _get_html_page(link, session=None): + # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.debug('Cannot look at %s URL %s', vcs_scheme, link) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib_parse.urlparse(url) + if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.debug( + 'Skipping page %s because it looks like an archive, and cannot ' + 'be checked by HEAD.', link, + ) + except _NotHTML as exc: + logger.debug( + 'Skipping page %s because the %s request got Content-Type: %s', + link, exc.request_desc, exc.content_type, + ) + except HTTPError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, "connection error: %s" % exc) + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp) + return None + + +def _remove_duplicate_links(links): + # type: (Iterable[Link]) -> List[Link] + """ + Return a list of links, with duplicates removed and ordering preserved. + """ + # We preserve the ordering when removing duplicates because we can. + return list(OrderedDict.fromkeys(links)) + + +def group_locations(locations, expand_dir=False): + # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] + """ + Divide a list of locations into two groups: "files" (archives) and "urls." + + :return: A pair of lists (files, urls). 
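+
+    Illustrative example (editor's sketch; the paths are hypothetical)::
+
+        files, urls = group_locations([
+            '/srv/wheels/pkg-1.0.tar.gz',    # an existing local archive
+            'https://pypi.org/simple/pkg/',  # an index page URL
+        ])
+        # files -> ['file:///srv/wheels/pkg-1.0.tar.gz']
+        # urls  -> ['https://pypi.org/simple/pkg/']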
+ """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + # type: (str) -> None + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if os.path.isdir(path): + if expand_dir: + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url: + urls.append(url) + else: + logger.warning( + "Path '{0}' is ignored: " + "it is a directory.".format(path), + ) + elif os.path.isfile(path): + sort_path(path) + else: + logger.warning( + "Url '%s' is ignored: it is neither a file " + "nor a directory.", url, + ) + elif is_url(url): + # Only add url with clear scheme + urls.append(url) + else: + logger.warning( + "Url '%s' is ignored. It is either a non-existing " + "path or lacks a specific scheme.", url, + ) + + return files, urls + + +class CollectedLinks(object): + + """ + Encapsulates the return value of a call to LinkCollector.collect_links(). + + The return value includes both URLs to project pages containing package + links, as well as individual package Link objects collected from other + sources. + + This info is stored separately as: + + (1) links from the configured file locations, + (2) links from the configured find_links, and + (3) urls to HTML project pages, as described by the PEP 503 simple + repository API. + """ + + def __init__( + self, + files, # type: List[Link] + find_links, # type: List[Link] + project_urls, # type: List[Link] + ): + # type: (...) -> None + """ + :param files: Links from file locations. + :param find_links: Links from find_links. + :param project_urls: URLs to HTML project pages, as described by + the PEP 503 simple repository API. + """ + self.files = files + self.find_links = find_links + self.project_urls = project_urls + + +class LinkCollector(object): + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_links() method. + """ + + def __init__( + self, + session, # type: PipSession + search_scope, # type: SearchScope + ): + # type: (...) -> None + self.search_scope = search_scope + self.session = session + + @property + def find_links(self): + # type: () -> List[str] + return self.search_scope.find_links + + def fetch_page(self, location): + # type: (Link) -> Optional[HTMLPage] + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_links(self, project_name): + # type: (str) -> CollectedLinks + """Find all available links for the given project name. + + :return: All the Link objects (unfiltered), as a CollectedLinks object. 
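+
+        Illustrative usage (editor's sketch; ``session`` and ``scope``
+        are assumed to be a configured PipSession and SearchScope)::
+
+            collector = LinkCollector(session=session, search_scope=scope)
+            collected = collector.collect_links('requests')
+            # collected.files, collected.find_links and
+            # collected.project_urls hold the three groups described above.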
+ """ + search_scope = self.search_scope + index_locations = search_scope.get_index_urls_locations(project_name) + index_file_loc, index_url_loc = group_locations(index_locations) + fl_file_loc, fl_url_loc = group_locations( + self.find_links, expand_dir=True, + ) + + file_links = [ + Link(url) for url in itertools.chain(index_file_loc, fl_file_loc) + ] + + # We trust every directly linked archive in find_links + find_link_links = [Link(url, '-f') for url in self.find_links] + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links. + # We want to filter out anything that does not have a secure origin. + url_locations = [ + link for link in itertools.chain( + (Link(url) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + ) + if self.session.is_secure_origin(link) + ] + + url_locations = _remove_duplicate_links(url_locations) + lines = [ + '{} location(s) to search for versions of {}:'.format( + len(url_locations), project_name, + ), + ] + for link in url_locations: + lines.append('* {}'.format(link)) + logger.debug('\n'.join(lines)) + + return CollectedLinks( + files=file_links, + find_links=find_link_links, + project_urls=url_locations, + ) diff --git a/pipenv/patched/notpip/_internal/index/package_finder.py b/pipenv/patched/notpip/_internal/index/package_finder.py new file mode 100644 index 0000000000..e8a806a448 --- /dev/null +++ b/pipenv/patched/notpip/_internal/index/package_finder.py @@ -0,0 +1,1049 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import re + +from pipenv.patched.notpip._vendor.packaging import specifiers +from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name +from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version + +from pipenv.patched.notpip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pipenv.patched.notpip._internal.index.collector import parse_links +from pipenv.patched.notpip._internal.models.candidate import InstallationCandidate +from pipenv.patched.notpip._internal.models.format_control import FormatControl +from pipenv.patched.notpip._internal.models.link import Link +from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences +from pipenv.patched.notpip._internal.models.target_python import TargetPython +from pipenv.patched.notpip._internal.models.wheel import Wheel +from pipenv.patched.notpip._internal.utils.filetypes import WHEEL_EXTENSION +from pipenv.patched.notpip._internal.utils.logging import indent_log +from pipenv.patched.notpip._internal.utils.misc import build_netloc +from pipenv.patched.notpip._internal.utils.packaging import check_requires_python +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +from pipenv.patched.notpip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union, + ) + + from pipenv.patched.notpip._vendor.packaging.tags import Tag + from pipenv.patched.notpip._vendor.packaging.version import _BaseVersion + + from pipenv.patched.notpip._internal.index.collector import LinkCollector + from pipenv.patched.notpip._internal.models.search_scope 
import SearchScope + from pipenv.patched.notpip._internal.req import InstallRequirement + from pipenv.patched.notpip._internal.utils.hashes import Hashes + + BuildTag = Union[Tuple[()], Tuple[int, str]] + CandidateSortingKey = ( + Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] + ) + + +__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] + + +logger = logging.getLogger(__name__) + + +def _check_link_requires_python( + link, # type: Link + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> bool + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, link, + ) + else: + if not is_compatible: + version = '.'.join(map(str, version_info)) + if not ignore_requires_python: + logger.debug( + 'Link requires a different Python (%s not in: %r): %s', + version, link.requires_python, link, + ) + return False + + logger.debug( + 'Ignoring failed Requires-Python check (%s not in: %r) ' + 'for link: %s', + version, link.requires_python, link, + ) + + return True + + +class LinkEvaluator(object): + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name, # type: str + canonical_name, # type: str + formats, # type: FrozenSet[str] + target_python, # type: TargetPython + allow_yanked, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ignore_compatibility=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + :param Optional[bool] ignore_compatibility: Whether to ignore + compatibility of python versions and allow all versions of packages. 
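+
+        Illustrative behaviour (editor's addition, assuming
+        canonical_name='pkg', 'binary' among the allowed formats, and a
+        CPython 3 target): evaluate_link() on a link named
+        ``pkg-1.0-py3-none-any.whl`` returns (True, '1.0'), while a link
+        with no file extension returns (False, 'not a file').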
+ """ + if ignore_requires_python is None: + ignore_requires_python = False + if ignore_compatibility is None: + ignore_compatibility = True + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + self._ignore_compatibility = ignore_compatibility + + self.project_name = project_name + + def evaluate_link(self, link): + # type: (Link) -> Tuple[bool, Optional[Text]] + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or '' + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. + return (False, u'yanked for reason: {}'.format(reason)) + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, 'not a file') + if ext not in SUPPORTED_EXTENSIONS: + return (False, 'unsupported archive format: %s' % ext) + if "binary" not in self._formats and ext == WHEEL_EXTENSION and not self._ignore_compatibility: + reason = 'No binaries permitted for %s' % self.project_name + return (False, reason) + if "macosx10" in link.path and ext == '.zip' and not self._ignore_compatibility: + return (False, 'macosx10 one') + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, 'invalid wheel filename') + if canonicalize_name(wheel.name) != self._canonical_name: + reason = 'wrong project name (not %s)' % self.project_name + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags) and not self._ignore_compatibility: + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags match: {}".format( + ', '.join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. + if "source" not in self._formats and ext != WHEEL_EXTENSION: + return (False, 'No sources permitted for %s' % self.project_name) + + if not version: + version = _extract_version_from_fragment( + egg_info, self._canonical_name, + ) + if not version: + return ( + False, 'Missing project version for %s' % self.project_name, + ) + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, 'Python version is incorrect') + + supports_python = _check_link_requires_python( + link, version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python and not self._ignore_compatibility: + # Return None for the reason text to suppress calling + # _log_skipped_link(). 
+ return (False, None) + + logger.debug('Found link %s, version: %s', link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates, # type: List[InstallationCandidate] + hashes, # type: Hashes + project_name, # type: str +): + # type: (...) -> List[InstallationCandidate] + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + 'Given no hashes to check %s links for project %r: ' + 'discarding no candidates', + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = 'discarding no candidates' + else: + discard_message = 'discarding {} non-matches:\n {}'.format( + len(non_matches), + '\n '.join(str(candidate.link) for candidate in non_matches) + ) + + logger.debug( + 'Checked %s links for project %r against %s hashes ' + '(%s matches, %s no digest): %s', + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message + ) + + return filtered + + +class CandidatePreferences(object): + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + ): + # type: (...) -> None + """ + :param allow_all_prereleases: Whether to allow all pre-releases. + """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult(object): + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + """ + + def __init__( + self, + candidates, # type: List[InstallationCandidate] + applicable_candidates, # type: List[InstallationCandidate] + best_candidate, # type: Optional[InstallationCandidate] + ): + # type: (...) -> None + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. 
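+
+        Illustrative construction (editor's sketch; the lists are
+        hypothetical and must satisfy the assertions below)::
+
+            result = BestCandidateResult(
+                candidates=all_candidates,
+                applicable_candidates=applicable,  # subset of the above
+                best_candidate=best,               # drawn from applicable
+            )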
+ """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through all candidates. + """ + return iter(self._candidates) + + def iter_applicable(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through the applicable candidates. + """ + return iter(self._applicable_candidates) + + +class CandidateEvaluator(object): + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name, # type: str + target_python=None, # type: Optional[TargetPython] + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name, # type: str + supported_tags, # type: List[Tag] + specifier, # type: specifiers.BaseSpecifier + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> None + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + + def get_applicable_candidates( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> List[InstallationCandidate] + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. 
+        applicable_candidates = [
+            c for c in candidates if str(c.version) in versions
+        ]
+
+        filtered_applicable_candidates = filter_unallowed_hashes(
+            candidates=applicable_candidates,
+            hashes=self._hashes,
+            project_name=self._project_name,
+        )
+
+        return sorted(filtered_applicable_candidates, key=self._sort_key)
+
+    def _sort_key(self, candidate, ignore_compatibility=True):
+        # type: (InstallationCandidate, bool) -> CandidateSortingKey
+        """
+        Function to pass as the `key` argument to a call to sorted() to sort
+        InstallationCandidates by preference.
+
+        Returns a tuple such that tuples sorting as greater using Python's
+        default comparison operator are more preferred.
+
+        The preference is as follows:
+
+        First and foremost, candidates with allowed (matching) hashes are
+        always preferred over candidates without matching hashes. This is
+        because e.g. if the only candidate with an allowed hash is yanked,
+        we still want to use that candidate.
+
+        Second, excepting hash considerations, candidates that have been
+        yanked (in the sense of PEP 592) are always less preferred than
+        candidates that haven't been yanked. Then:
+
+        If not finding wheels, they are sorted by version only.
+        If finding wheels, then the sort order is by version, then:
+          1. existing installs
+          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
+          3. source archives
+        If prefer_binary was set, then all wheels are sorted above sources.
+
+        Note: it was considered to embed this logic into the Link
+              comparison operators, but then different sdist links
+              with the same version, would have to be considered equal
+        """
+        valid_tags = self._supported_tags
+        support_num = len(valid_tags)
+        build_tag = ()  # type: BuildTag
+        binary_preference = 0
+        link = candidate.link
+        if link.is_wheel:
+            # can raise InvalidWheelFilename
+            wheel = Wheel(link.filename)
+            if not wheel.supported(valid_tags) and not ignore_compatibility:
+                raise UnsupportedWheel(
+                    "%s is not a supported wheel for this platform. It "
+                    "can't be sorted." % wheel.filename
+                )
+            if self._prefer_binary:
+                binary_preference = 1
+            tags = valid_tags if not ignore_compatibility else None
+            try:
+                pri = -(wheel.support_index_min(tags=tags))
+            except TypeError:
+                pri = -(support_num)
+            if wheel.build_tag is not None:
+                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
+                build_tag_groups = match.groups()
+                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+        else:  # sdist
+            pri = -(support_num)
+        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
+        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
+        return (
+            has_allowed_hash, yank_value, binary_preference, candidate.version,
+            build_tag, pri,
+        )
+
+    def sort_best_candidate(
+        self,
+        candidates,    # type: List[InstallationCandidate]
+    ):
+        # type: (...) -> Optional[InstallationCandidate]
+        """
+        Return the best candidate per the instance's sort order, or None if
+        no candidate is acceptable.
+        """
+        if not candidates:
+            return None
+
+        best_candidate = max(candidates, key=self._sort_key)
+
+        # Log a warning per PEP 592 if necessary before returning.
+        link = best_candidate.link
+        if link.is_yanked:
+            reason = link.yanked_reason or ''
+            msg = (
+                # Mark this as a unicode string to prevent
+                # "UnicodeEncodeError: 'ascii' codec can't encode character"
+                # in Python 2 when the reason contains non-ascii characters.
+ u'The candidate selected for download or install is a ' + 'yanked version: {candidate}\n' + 'Reason for being yanked: {reason}' + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return best_candidate + + def compute_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> BestCandidateResult + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder(object): + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector, # type: LinkCollector + target_python, # type: TargetPython + allow_yanked, # type: bool + format_control=None, # type: Optional[FormatControl] + candidate_prefs=None, # type: CandidatePreferences + ignore_requires_python=None, # type: Optional[bool] + ignore_compatibility=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + if ignore_compatibility is None: + ignore_compatibility = False + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + self._ignore_compatibility = ignore_compatibility + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links = set() # type: Set[Link] + + # Kenneth's Hack + self.extra = None + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector, # type: LinkCollector + selection_prefs, # type: SelectionPreferences + target_python=None, # type: Optional[TargetPython] + ): + # type: (...) -> PackageFinder + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. 
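+
+        Illustrative usage (editor's sketch; the ``link_collector``
+        wiring is assumed to exist already)::
+
+            selection_prefs = SelectionPreferences(allow_yanked=False)
+            finder = PackageFinder.create(
+                link_collector=link_collector,
+                selection_prefs=selection_prefs,
+            )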
+ """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @staticmethod + def get_extras_links(links): + requires = [] + extras = {} + + current_list = requires + + for link in links: + if not link: + current_list = requires + if link.startswith('['): + current_list = [] + extras[link[1:-1]] = current_list + else: + current_list.append(link) + return extras + + @property + def search_scope(self): + # type: () -> SearchScope + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope): + # type: (SearchScope) -> None + self._link_collector.search_scope = search_scope + + @property + def find_links(self): + # type: () -> List[str] + return self._link_collector.find_links + + @property + def index_urls(self): + # type: () -> List[str] + return self.search_scope.index_urls + + @property + def trusted_hosts(self): + # type: () -> Iterable[str] + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self): + # type: () -> bool + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self): + # type: () -> None + self._candidate_prefs.allow_all_prereleases = True + + def make_link_evaluator(self, project_name): + # type: (str) -> LinkEvaluator + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ignore_compatibility=self._ignore_compatibility + ) + + def _sort_links(self, links): + # type: (Iterable[Link]) -> List[Link] + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() # type: Set[Link] + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link, reason): + # type: (Link, Text) -> None + if link not in self._logged_links: + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. + # Also, put the link at the end so the reason is more visible + # and because the link string is usually very long. + logger.debug(u'Skipping link: %s: %s', reason, link) + self._logged_links.add(link) + + def get_install_candidate(self, link_evaluator, link): + # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. 
+ """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + # Convert the Text result to str since InstallationCandidate + # accepts str. + version=str(result), + requires_python=getattr(link, "requires_python", None) + ) + + def evaluate_links(self, link_evaluator, links): + # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] + """ + Convert links that are candidates to InstallationCandidate objects. + """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url(self, project_url, link_evaluator): + # type: (Link, LinkEvaluator) -> List[InstallationCandidate] + logger.debug( + 'Fetching project page and analyzing links: %s', project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + def find_all_candidates(self, project_name): + # type: (str) -> List[InstallationCandidate] + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. + """ + collected_links = self._link_collector.collect_links(project_name) + + link_evaluator = self.make_link_evaluator(project_name) + + find_links_versions = self.evaluate_links( + link_evaluator, + links=collected_links.find_links, + ) + + page_versions = [] + for project_url in collected_links.project_urls: + package_links = self.process_project_url( + project_url, link_evaluator=link_evaluator, + ) + page_versions.extend(package_links) + + file_versions = self.evaluate_links( + link_evaluator, + links=collected_links.files, + ) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.link.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return file_versions + find_links_versions + page_versions + + def make_candidate_evaluator( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object to use. + """ + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + def find_best_candidate( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> BestCandidateResult + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement(self, req, upgrade): + # type: (InstallRequirement, bool) -> Optional[Link] + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a Link if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, specifier=req.specifier, hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version = None # type: Optional[_BaseVersion] + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + + def _format_versions(cand_iter): + # type: (Iterable[InstallationCandidate]) -> str + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pipenv.patched.notpip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). + return ", ".join(sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + )) or "none" + + if installed_version is None and best_candidate is None: + logger.critical( + 'Could not find a version that satisfies the requirement %s ' + '(from versions: %s)', + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + 'No matching distribution found for %s' % req + ) + + best_installed = False + if installed_version and ( + best_candidate is None or + best_candidate.version <= installed_version): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + 'Existing installed version (%s) is most up-to-date and ' + 'satisfies requirement', + installed_version, + ) + else: + logger.debug( + 'Existing installed version (%s) satisfies requirement ' + '(most up-to-date version is %s)', + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + 'Installed version (%s) is most up-to-date (past versions: ' + '%s)', + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + 'Using version %s (newest of versions: %s)', + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate.link + + +def _find_name_version_sep(fragment, canonical_name): + # type: (str, str) -> int + """Find the separator's index based on the package's canonical name. + + :param fragment: A + filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. 
+    for i, c in enumerate(fragment):
+        if c != "-":
+            continue
+        if canonicalize_name(fragment[:i]) == canonical_name:
+            return i
+    raise ValueError("{} does not match {}".format(fragment, canonical_name))
+
+
+def _extract_version_from_fragment(fragment, canonical_name):
+    # type: (str, str) -> Optional[str]
+    """Parse the version string from a <package>+<version> filename
+    "fragment" (stem) or egg fragment.
+
+    :param fragment: The string to parse. E.g. foo-2.1
+    :param canonical_name: The canonicalized name of the package this
+        belongs to.
+    """
+    try:
+        version_start = _find_name_version_sep(fragment, canonical_name) + 1
+    except ValueError:
+        return None
+    version = fragment[version_start:]
+    if not version:
+        return None
+    return version
diff --git a/pipenv/patched/notpip/_internal/legacy_resolve.py b/pipenv/patched/notpip/_internal/legacy_resolve.py
index 674efd09c8..02f3e97a73 100644
--- a/pipenv/patched/notpip/_internal/legacy_resolve.py
+++ b/pipenv/patched/notpip/_internal/legacy_resolve.py
@@ -29,11 +29,7 @@
     UnsupportedPythonVersion,
 )
 from pipenv.patched.notpip._internal.utils.logging import indent_log
-from pipenv.patched.notpip._internal.utils.misc import (
-    dist_in_usersite,
-    ensure_dir,
-    normalize_version_info,
-)
+from pipenv.patched.notpip._internal.utils.misc import dist_in_usersite, normalize_version_info
 from pipenv.patched.notpip._internal.utils.packaging import (
     check_requires_python,
     get_requires_python,
@@ -45,8 +41,7 @@
     from pipenv.patched.notpip._vendor import pkg_resources
 
     from pipenv.patched.notpip._internal.distributions import AbstractDistribution
-    from pipenv.patched.notpip._internal.network.session import PipSession
-    from pipenv.patched.notpip._internal.index import PackageFinder
+    from pipenv.patched.notpip._internal.index.package_finder import PackageFinder
     from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer
     from pipenv.patched.notpip._internal.req.req_install import InstallRequirement
     from pipenv.patched.notpip._internal.req.req_set import RequirementSet
@@ -54,6 +49,7 @@
     InstallRequirementProvider = Callable[
         [str, InstallRequirement], InstallRequirement
     ]
+    DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
 
 logger = logging.getLogger(__name__)
 
@@ -116,7 +112,6 @@ class Resolver(object):
     def __init__(
         self,
         preparer,  # type: RequirementPreparer
-        session,  # type: PipSession
         finder,  # type: PackageFinder
         make_install_req,  # type: InstallRequirementProvider
         use_user_site,  # type: bool
@@ -141,10 +136,6 @@ def __init__(
 
         self.preparer = preparer
         self.finder = finder
-        self.session = session
-
-        # This is set in resolve
-        self.require_hashes = None  # type: Optional[bool]
 
         self.upgrade_strategy = upgrade_strategy
         self.force_reinstall = force_reinstall
@@ -159,7 +150,7 @@ def __init__(
             self.ignore_requires_python = True
 
         self._discovered_dependencies = \
-            defaultdict(list)  # type: DefaultDict[str, List]
+            defaultdict(list)  # type: DiscoveredDependencies
 
     def resolve(self, requirement_set):
         # type: (RequirementSet) -> None
@@ -173,26 +164,12 @@ def resolve(self, requirement_set):
         possible to move the preparation to become a step separated from
         dependency resolution.
         """
-        # make the wheelhouse
-        if self.preparer.wheel_download_dir:
-            ensure_dir(self.preparer.wheel_download_dir)
-
         # If any top-level requirement has a hash specified, enter
         # hash-checking mode, which requires hashes from all.
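# The resolve() loop in the hunk just below walks the dependency graph
# breadth-first by extending a list while a lazy chain() iterates it. A
# self-contained sketch of the same pattern with toy data:
from itertools import chain

deps = {"a": ["b", "c"], "b": ["c"], "c": []}  # hypothetical dependency table
roots = ["a"]
discovered = []
for name in chain(roots, discovered):  # mirrors chain(root_reqs, discovered_reqs)
    discovered.extend(deps[name])      # mirrors discovered_reqs.extend(_resolve_one(...))
print(discovered)  # ['b', 'c', 'c'] -- the RequirementSet deduplicates in pip itself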
root_reqs = ( requirement_set.unnamed_requirements + list(requirement_set.requirements.values()) ) - self.require_hashes = ( - requirement_set.require_hashes or - any(req.has_hash_options for req in root_reqs) - ) - - # Display where finder is looking for packages - search_scope = self.finder.search_scope - locations = search_scope.get_formatted_locations() - if locations: - logger.info(locations) # Actually prepare the files, and collect any exceptions. Most hash # exceptions cannot be checked ahead of time, because @@ -202,9 +179,7 @@ def resolve(self, requirement_set): hash_errors = HashErrors() for req in chain(root_reqs, discovered_reqs): try: - discovered_reqs.extend( - self._resolve_one(requirement_set, req) - ) + discovered_reqs.extend(self._resolve_one(requirement_set, req)) except HashError as exc: exc.req = req hash_errors.append(exc) @@ -230,7 +205,7 @@ def _set_req_to_reinstall(self, req): # Don't uninstall the conflict if doing a user install and the # conflict is not a user install. if not self.use_user_site or dist_in_usersite(req.satisfied_by): - req.conflicts_with = req.satisfied_by + req.should_reinstall = True req.satisfied_by = None def _check_skip_installed(self, req_to_install): @@ -291,14 +266,8 @@ def _get_abstract_dist_for(self, req): """Takes a InstallRequirement and returns a single AbstractDist \ representing a prepared variant of the same. """ - assert self.require_hashes is not None, ( - "require_hashes should have been set in Resolver.resolve()" - ) - if req.editable: - return self.preparer.prepare_editable_requirement( - req, self.require_hashes, self.use_user_site, self.finder, - ) + return self.preparer.prepare_editable_requirement(req) # satisfied_by is only evaluated by calling _check_skip_installed, # so it must be None here. @@ -307,16 +276,15 @@ def _get_abstract_dist_for(self, req): if req.satisfied_by: return self.preparer.prepare_installed_requirement( - req, self.require_hashes, skip_reason + req, skip_reason ) upgrade_allowed = self._is_upgrade_allowed(req) # We eagerly populate the link, since that's our "legacy" behavior. - req.populate_link(self.finder, upgrade_allowed, self.require_hashes) - abstract_dist = self.preparer.prepare_linked_requirement( - req, self.session, self.finder, self.require_hashes - ) + require_hashes = self.preparer.require_hashes + req.populate_link(self.finder, upgrade_allowed, require_hashes) + abstract_dist = self.preparer.prepare_linked_requirement(req) # NOTE # The following portion is for determining if a certain package is @@ -413,10 +381,13 @@ def add_req(subreq, extras_requested): # can refer to it when adding dependencies. if not requirement_set.has_requirement(req_to_install.name): # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. + req_to_install.is_direct = True + assert req_to_install.is_direct available_requested = sorted( set(dist.extras) & set(req_to_install.extras) ) - req_to_install.is_direct = True requirement_set.add_requirement( req_to_install, parent_req_name=None, extras_requested=available_requested, diff --git a/pipenv/patched/notpip/_internal/locations.py b/pipenv/patched/notpip/_internal/locations.py index 4bd3c87ab4..f36e7eb91e 100644 --- a/pipenv/patched/notpip/_internal/locations.py +++ b/pipenv/patched/notpip/_internal/locations.py @@ -2,7 +2,6 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False from __future__ import absolute_import @@ -14,14 +13,18 @@ import sysconfig from distutils import sysconfig as distutils_sysconfig from distutils.command.install import SCHEME_KEYS # type: ignore +from distutils.command.install import install as distutils_install_command +from pipenv.patched.notpip._internal.models.scheme import Scheme from pipenv.patched.notpip._internal.utils import appdirs from pipenv.patched.notpip._internal.utils.compat import WINDOWS -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING, cast from pipenv.patched.notpip._internal.utils.virtualenv import running_under_virtualenv if MYPY_CHECK_RUNNING: - from typing import Any, Union, Dict, List, Optional + from typing import Dict, List, Optional, Union + + from distutils.cmd import Command as DistutilsCommand # Application Directories @@ -38,6 +41,7 @@ def get_major_minor_version(): def get_src_prefix(): + # type: () -> str if running_under_virtualenv(): src_prefix = os.path.join(sys.prefix, 'src') else: @@ -88,29 +92,25 @@ def get_src_prefix(): bin_py = '/usr/local/bin' -def distutils_scheme(dist_name, user=False, home=None, root=None, - isolated=False, prefix=None): - # type:(str, bool, str, str, bool, str) -> dict +def distutils_scheme( + dist_name, user=False, home=None, root=None, isolated=False, prefix=None +): + # type:(str, bool, str, str, bool, str) -> Dict[str, str] """ Return a distutils install scheme """ from distutils.dist import Distribution - scheme = {} - - if isolated: - extra_dist_args = {"script_args": ["--no-user-cfg"]} - else: - extra_dist_args = {} dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] - dist_args.update(extra_dist_args) + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] d = Distribution(dist_args) - # Ignoring, typeshed issue reported python/typeshed/issues/2567 d.parse_config_files() - # NOTE: Ignoring type since mypy can't find attributes on 'Command' - i = d.get_command_obj('install', create=True) # type: Any - assert i is not None + obj = None # type: Optional[DistutilsCommand] + obj = d.get_command_obj('install', create=True) + assert obj is not None + i = cast(distutils_install_command, obj) # NOTE: setting user or home has the side-effect of creating the home dir # or user base for installations during finalize_options() # ideally, we'd prefer a scheme class that has no side-effects. @@ -123,6 +123,8 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, i.home = home or i.home i.root = root or i.root i.finalize_options() + + scheme = {} for key in SCHEME_KEYS: scheme[key] = getattr(i, 'install_' + key) @@ -131,9 +133,7 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, # platlib). 
Note, i.install_lib is *always* set after # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config - - # Ignoring, typeshed issue reported python/typeshed/issues/2567 - if 'install_lib' in d.get_option_dict('install'): # type: ignore + if 'install_lib' in d.get_option_dict('install'): scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) if running_under_virtualenv(): @@ -154,3 +154,41 @@ def distutils_scheme(dist_name, user=False, home=None, root=None, ) return scheme + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None, # type: Optional[str] +): + # type: (...) -> Scheme + """ + Get the "scheme" corresponding to the input parameters. The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme( + dist_name, user, home, root, isolated, prefix + ) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) diff --git a/pipenv/patched/notpip/_internal/main.py b/pipenv/patched/notpip/_internal/main.py index ed712c4243..bf807cb3d2 100644 --- a/pipenv/patched/notpip/_internal/main.py +++ b/pipenv/patched/notpip/_internal/main.py @@ -1,47 +1,16 @@ -"""Primary application entrypoint. -""" -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from __future__ import absolute_import - -import locale -import logging -import os -import sys - -from pipenv.patched.notpip._internal.cli.autocompletion import autocomplete -from pipenv.patched.notpip._internal.cli.main_parser import parse_command -from pipenv.patched.notpip._internal.commands import create_command -from pipenv.patched.notpip._internal.exceptions import PipError -from pipenv.patched.notpip._internal.utils import deprecation - -logger = logging.getLogger(__name__) +if MYPY_CHECK_RUNNING: + from typing import Optional, List def main(args=None): - if args is None: - args = sys.argv[1:] - - # Configure our deprecation warnings to be sent through loggers - deprecation.install_warning_logger() - - autocomplete() - - try: - cmd_name, cmd_args = parse_command(args) - except PipError as exc: - sys.stderr.write("ERROR: %s" % exc) - sys.stderr.write(os.linesep) - sys.exit(1) + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. 
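# A quick sketch of how the get_scheme() wrapper added to locations.py
# above is meant to be consumed. The concrete paths vary by platform and
# interpreter, so the commented values are illustrative only.
from pipenv.patched.notpip._internal.locations import get_scheme

scheme = get_scheme("requests")
print(scheme.purelib)  # e.g. <prefix>/lib/python3.X/site-packages
print(scheme.scripts)  # e.g. <prefix>/bin (Scripts\ on Windows)
print(scheme.headers)  # per-project header dir, keyed by the dist name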
-    # Needed for locale.getpreferredencoding(False) to work
-    # in pip._internal.utils.encoding.auto_decode
-    try:
-        locale.setlocale(locale.LC_ALL, '')
-    except locale.Error as e:
-        # setlocale can apparently crash if locale are uninitialized
-        logger.debug("Ignoring error %s when setting locale", e)
-    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pipenv.patched.notpip._internal.utils.entrypoints import _wrapper
 
-    return command.main(cmd_args)
+    return _wrapper(args)
diff --git a/pipenv/patched/notpip/_internal/models/candidate.py b/pipenv/patched/notpip/_internal/models/candidate.py
index 937d872f3f..1e3abf0d1e 100644
--- a/pipenv/patched/notpip/_internal/models/candidate.py
+++ b/pipenv/patched/notpip/_internal/models/candidate.py
@@ -1,6 +1,3 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version
 
 from pipenv.patched.notpip._internal.utils.models import KeyBasedCompareMixin
@@ -9,32 +6,32 @@
 if MYPY_CHECK_RUNNING:
     from pipenv.patched.notpip._vendor.packaging.version import _BaseVersion
     from pipenv.patched.notpip._internal.models.link import Link
-    from typing import Any
 
 
 class InstallationCandidate(KeyBasedCompareMixin):
     """Represents a potential "candidate" for installation.
     """
 
-    def __init__(self, project, version, link, requires_python=None):
-        # type: (Any, str, Link, Any) -> None
-        self.project = project
+    def __init__(self, name, version, link, requires_python=None):
+        # type: (str, str, Link, Any) -> None
+        self.name = name
         self.version = parse_version(version)  # type: _BaseVersion
         self.link = link
         self.requires_python = requires_python
 
         super(InstallationCandidate, self).__init__(
-            key=(self.project, self.version, self.link),
+            key=(self.name, self.version, self.link),
             defining_class=InstallationCandidate
         )
 
     def __repr__(self):
         # type: () -> str
         return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
-            self.project, self.version, self.link,
+            self.name, self.version, self.link,
         )
 
     def __str__(self):
+        # type: () -> str
         return '{!r} candidate (version {} at {})'.format(
-            self.project, self.version, self.link,
+            self.name, self.version, self.link,
         )
diff --git a/pipenv/patched/notpip/_internal/models/format_control.py b/pipenv/patched/notpip/_internal/models/format_control.py
index cbb5795876..2e4648948f 100644
--- a/pipenv/patched/notpip/_internal/models/format_control.py
+++ b/pipenv/patched/notpip/_internal/models/format_control.py
@@ -1,6 +1,5 @@
 # The following comment should be removed at some point in the future.
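# Sketch of the FormatControl semantics tightened in the hunks below: the
# two sets hold canonical project names (or ":all:" to match everything),
# and get_allowed_formats() reports what may be used for a given project.
# The project names here are illustrative.
from pipenv.patched.notpip._internal.models.format_control import FormatControl

fc = FormatControl(no_binary={"simplejson"}, only_binary=set())
print(fc.get_allowed_formats("simplejson"))  # frozenset({'source'})
print(fc.get_allowed_formats("requests"))    # frozenset({'binary', 'source'})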
 # mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
 
 from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 
@@ -16,7 +15,7 @@ class FormatControl(object):
     """
 
     def __init__(self, no_binary=None, only_binary=None):
-        # type: (Optional[Set], Optional[Set]) -> None
+        # type: (Optional[Set[str]], Optional[Set[str]]) -> None
         if no_binary is None:
             no_binary = set()
         if only_binary is None:
@@ -26,12 +25,15 @@ def __init__(self, no_binary=None, only_binary=None):
         self.only_binary = only_binary
 
     def __eq__(self, other):
+        # type: (object) -> bool
         return self.__dict__ == other.__dict__
 
     def __ne__(self, other):
+        # type: (object) -> bool
         return not self.__eq__(other)
 
     def __repr__(self):
+        # type: () -> str
         return "{}({}, {})".format(
             self.__class__.__name__,
             self.no_binary,
@@ -40,7 +42,7 @@ def __repr__(self):
 
     @staticmethod
     def handle_mutual_excludes(value, target, other):
-        # type: (str, Optional[Set], Optional[Set]) -> None
+        # type: (str, Optional[Set[str]], Optional[Set[str]]) -> None
         if value.startswith('-'):
             raise CommandError(
                 "--no-binary / --only-binary option requires 1 argument."
@@ -63,7 +65,7 @@ def handle_mutual_excludes(value, target, other):
             target.add(name)
 
     def get_allowed_formats(self, canonical_name):
-        # type: (str) -> FrozenSet
+        # type: (str) -> FrozenSet[str]
         result = {"binary", "source"}
         if canonical_name in self.only_binary:
             result.discard('source')
diff --git a/pipenv/patched/notpip/_internal/models/link.py b/pipenv/patched/notpip/_internal/models/link.py
index 688bd14f6c..719ebe792e 100644
--- a/pipenv/patched/notpip/_internal/models/link.py
+++ b/pipenv/patched/notpip/_internal/models/link.py
@@ -1,6 +1,3 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 import os
 import posixpath
 import re
@@ -19,7 +16,7 @@
 if MYPY_CHECK_RUNNING:
     from typing import Optional, Text, Tuple, Union
 
-    from pipenv.patched.notpip._internal.collector import HTMLPage
+    from pipenv.patched.notpip._internal.index.collector import HTMLPage
     from pipenv.patched.notpip._internal.utils.hashes import Hashes
 
 
@@ -67,6 +64,7 @@ def __init__(
         super(Link, self).__init__(key=url, defining_class=Link)
 
     def __str__(self):
+        # type: () -> str
         if self.requires_python:
             rp = ' (requires-python:%s)' % self.requires_python
         else:
@@ -78,6 +76,7 @@
         return redact_auth_from_url(str(self._url))
 
     def __repr__(self):
+        # type: () -> str
         return '<Link %s>' % self
 
     @property
@@ -180,7 +179,7 @@ def hash_name(self):
 
     @property
     def show_url(self):
-        # type: () -> Optional[str]
+        # type: () -> str
         return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
 
     @property
@@ -211,6 +210,7 @@ def is_yanked(self):
 
     @property
     def has_hash(self):
+        # type: () -> bool
         return self.hash_name is not None
 
     def is_hash_allowed(self, hashes):
diff --git a/pipenv/patched/notpip/_internal/models/scheme.py b/pipenv/patched/notpip/_internal/models/scheme.py
new file mode 100644
index 0000000000..af07b4078f
--- /dev/null
+++ b/pipenv/patched/notpip/_internal/models/scheme.py
@@ -0,0 +1,25 @@
+"""
+For types associated with installation schemes.
+
+For a general overview of available schemes and their context, see
+https://docs.python.org/3/install/index.html#alternate-installation.
+"""
+
+
+class Scheme(object):
+    """A Scheme holds paths which are used as the base directories for
+    artifacts associated with a Python package.
+ """ + def __init__( + self, + platlib, # type: str + purelib, # type: str + headers, # type: str + scripts, # type: str + data, # type: str + ): + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/pipenv/patched/notpip/_internal/models/search_scope.py b/pipenv/patched/notpip/_internal/models/search_scope.py index 9e82ccb36f..b0ad9a5e02 100644 --- a/pipenv/patched/notpip/_internal/models/search_scope.py +++ b/pipenv/patched/notpip/_internal/models/search_scope.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - import itertools import logging import os @@ -10,7 +7,7 @@ from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse from pipenv.patched.notpip._internal.models.index import PyPI -from pipenv.patched.notpip._internal.utils.compat import HAS_TLS +from pipenv.patched.notpip._internal.utils.compat import has_tls from pipenv.patched.notpip._internal.utils.misc import normalize_path, redact_auth_from_url from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -52,7 +49,7 @@ def create( # If we don't have TLS enabled, then WARN if anyplace we're looking # relies on TLS. - if not HAS_TLS: + if not has_tls(): for link in itertools.chain(index_urls, built_find_links): parsed = urllib_parse.urlparse(link) if parsed.scheme == 'https': @@ -101,6 +98,7 @@ def get_index_urls_locations(self, project_name): """ def mkurl_pypi_url(url): + # type: (str) -> str loc = posixpath.join( url, urllib_parse.quote(canonicalize_name(project_name))) diff --git a/pipenv/patched/notpip/_internal/models/target_python.py b/pipenv/patched/notpip/_internal/models/target_python.py index c815b743cc..4a36fbf81b 100644 --- a/pipenv/patched/notpip/_internal/models/target_python.py +++ b/pipenv/patched/notpip/_internal/models/target_python.py @@ -6,7 +6,8 @@ if MYPY_CHECK_RUNNING: from typing import List, Optional, Tuple - from pipenv.patched.notpip._internal.pep425tags import Pep425Tag + + from pipenv.patched.notpip._vendor.packaging.tags import Tag class TargetPython(object): @@ -55,7 +56,7 @@ def __init__( self.py_version_info = py_version_info # This is used to cache the return value of get_tags(). - self._valid_tags = None # type: Optional[List[Pep425Tag]] + self._valid_tags = None # type: Optional[List[Tag]] def format_given(self): # type: () -> str @@ -80,7 +81,7 @@ def format_given(self): ) def get_tags(self): - # type: () -> List[Pep425Tag] + # type: () -> List[Tag] """ Return the supported PEP 425 tags to check wheel candidates against. @@ -91,12 +92,12 @@ def get_tags(self): # versions=None uses special default logic. py_version_info = self._given_py_version_info if py_version_info is None: - versions = None + version = None else: - versions = [version_info_to_nodot(py_version_info)] + version = version_info_to_nodot(py_version_info) tags = get_supported( - versions=versions, + version=version, platform=self.platform, abi=self.abi, impl=self.implementation, diff --git a/pipenv/patched/notpip/_internal/models/wheel.py b/pipenv/patched/notpip/_internal/models/wheel.py new file mode 100644 index 0000000000..3ee1891ed4 --- /dev/null +++ b/pipenv/patched/notpip/_internal/models/wheel.py @@ -0,0 +1,78 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. 
+""" +import re + +from pipenv.patched.notpip._vendor.packaging.tags import Tag + +from pipenv.patched.notpip._internal.exceptions import InvalidWheelFilename +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +class Wheel(object): + """A wheel file""" + + wheel_file_re = re.compile( + r"""^(?P(?P.+?)-(?P.*?)) + ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?) + \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + # type: (str) -> None + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + "%s is not a valid wheel filename." % filename + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.build_tag = wheel_info.group('build') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self): + # type: () -> List[str] + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags): + # type: (List[Tag]) -> int + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def supported(self, tags): + # type: (List[Tag]) -> bool + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. 
+ """ + return not self.file_tags.isdisjoint(tags) diff --git a/pipenv/patched/notpip/_internal/network/cache.py b/pipenv/patched/notpip/_internal/network/cache.py index 9954009c85..c25b161bad 100644 --- a/pipenv/patched/notpip/_internal/network/cache.py +++ b/pipenv/patched/notpip/_internal/network/cache.py @@ -9,6 +9,7 @@ from pipenv.patched.notpip._vendor.cachecontrol.cache import BaseCache from pipenv.patched.notpip._vendor.cachecontrol.caches import FileCache +from pipenv.patched.notpip._vendor.requests.models import Response from pipenv.patched.notpip._internal.utils.filesystem import adjacent_tmp_file, replace from pipenv.patched.notpip._internal.utils.misc import ensure_dir @@ -18,6 +19,11 @@ from typing import Optional +def is_from_cache(response): + # type: (Response) -> bool + return getattr(response, "from_cache", False) + + @contextmanager def suppressed_cache_errors(): """If we can't access the cache then we can just skip caching and process diff --git a/pipenv/patched/notpip/_internal/network/download.py b/pipenv/patched/notpip/_internal/network/download.py new file mode 100644 index 0000000000..60fc9d539a --- /dev/null +++ b/pipenv/patched/notpip/_internal/network/download.py @@ -0,0 +1,200 @@ +"""Download files with progress indicators. +""" +import cgi +import logging +import mimetypes +import os + +from pipenv.patched.notpip._vendor import requests +from pipenv.patched.notpip._vendor.requests.models import CONTENT_CHUNK_SIZE + +from pipenv.patched.notpip._internal.models.index import PyPI +from pipenv.patched.notpip._internal.network.cache import is_from_cache +from pipenv.patched.notpip._internal.network.utils import response_chunks +from pipenv.patched.notpip._internal.utils.misc import ( + format_size, + redact_auth_from_url, + splitext, +) +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.ui import DownloadProgressProvider + +if MYPY_CHECK_RUNNING: + from typing import Iterable, Optional + + from pipenv.patched.notpip._vendor.requests.models import Response + + from pipenv.patched.notpip._internal.models.link import Link + from pipenv.patched.notpip._internal.network.session import PipSession + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp): + # type: (Response) -> Optional[int] + try: + return int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp, # type: Response + link, # type: Link + progress_bar # type: str +): + # type: (...) 
-> Iterable[bytes] + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = '{} ({})'.format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + return DownloadProgressProvider( + progress_bar, max=total_length + )(chunks) + + +def sanitize_content_filename(filename): + # type: (str) -> str + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition, default_filename): + # type: (str, str) -> str + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get('filename') + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp, link): + # type: (Response, Link) -> str + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. + """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext = splitext(filename)[1] # type: Optional[str] + if not ext: + ext = mimetypes.guess_extension( + resp.headers.get('content-type', '') + ) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session, link): + # type: (PipSession, Link) -> Response + target_url = link.url.split('#', 1)[0] + resp = session.get( + target_url, + # We use Accept-Encoding: identity here because requests + # defaults to accepting compressed responses. This breaks in + # a variety of ways depending on how the server is configured. + # - Some servers will notice that the file isn't a compressible + # file and will leave the file alone and with an empty + # Content-Encoding + # - Some servers will notice that the file is already + # compressed and will leave the file alone and will add a + # Content-Encoding: gzip header + # - Some servers won't notice anything at all and will take + # a file that's already been compressed and compress it again + # and set the Content-Encoding: gzip header + # By setting this to request only the identity encoding We're + # hoping to eliminate the third case. 
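# Hypothetical wiring of the Download and Downloader classes defined just
# below; the URL is illustrative only. The progress_bar value corresponds
# to pip's --progress-bar option.
from pipenv.patched.notpip._internal.models.link import Link
from pipenv.patched.notpip._internal.network.download import Downloader
from pipenv.patched.notpip._internal.network.session import PipSession

downloader = Downloader(PipSession(), progress_bar="on")
download = downloader(Link("https://example.com/pkg-1.0-py3-none-any.whl"))
with open(download.filename, "wb") as f:  # filename derived from the response
    for chunk in download.chunks:
        f.write(chunk)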
Hopefully there does not + # exist a server which when given a file will notice it is + # already compressed and that you're not asking for a + # compressed file and will then decompress it before sending + # because if that's the case I don't think it'll ever be + # possible to make this work. + headers={"Accept-Encoding": "identity"}, + stream=True, + ) + resp.raise_for_status() + return resp + + +class Download(object): + def __init__( + self, + response, # type: Response + filename, # type: str + chunks, # type: Iterable[bytes] + ): + # type: (...) -> None + self.response = response + self.filename = filename + self.chunks = chunks + + +class Downloader(object): + def __init__( + self, + session, # type: PipSession + progress_bar, # type: str + ): + # type: (...) -> None + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link): + # type: (Link) -> Download + try: + resp = _http_get_download(self._session, link) + except requests.HTTPError as e: + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + return Download( + resp, + _get_http_response_filename(resp, link), + _prepare_download(resp, link, self._progress_bar), + ) diff --git a/pipenv/patched/notpip/_internal/network/session.py b/pipenv/patched/notpip/_internal/network/session.py index 178c045764..d2bee7d2d4 100644 --- a/pipenv/patched/notpip/_internal/network/session.py +++ b/pipenv/patched/notpip/_internal/network/session.py @@ -26,8 +26,7 @@ from pipenv.patched.notpip._internal.network.auth import MultiDomainBasicAuth from pipenv.patched.notpip._internal.network.cache import SafeFileCache # Import ssl from compat so the initial import occurs in only one place. -from pipenv.patched.notpip._internal.utils.compat import HAS_TLS, ipaddress, ssl -from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner +from pipenv.patched.notpip._internal.utils.compat import has_tls, ipaddress from pipenv.patched.notpip._internal.utils.glibc import libc_ver from pipenv.patched.notpip._internal.utils.misc import ( build_url_from_netloc, @@ -153,7 +152,8 @@ def user_agent(): if platform.machine(): data["cpu"] = platform.machine() - if HAS_TLS: + if has_tls(): + import _ssl as ssl data["openssl_version"] = ssl.OPENSSL_VERSION setuptools_version = get_installed_version("setuptools") @@ -212,8 +212,9 @@ def close(self): class InsecureHTTPAdapter(HTTPAdapter): def cert_verify(self, conn, url, verify, cert): - conn.cert_reqs = 'CERT_NONE' - conn.ca_certs = None + super(InsecureHTTPAdapter, self).cert_verify( + conn=conn, url=url, verify=False, cert=cert + ) class PipSession(requests.Session): @@ -262,19 +263,6 @@ def __init__(self, *args, **kwargs): backoff_factor=0.25, ) - # Check to ensure that the directory containing our cache directory - # is owned by the user current executing pip. If it does not exist - # we will check the parent directory until we find one that does exist. - if cache and not check_path_owner(cache): - logger.warning( - "The directory '%s' or its parent directory is not owned by " - "the current user and the cache has been disabled. Please " - "check the permissions and owner of that directory. If " - "executing pip with sudo, you may want sudo's -H flag.", - cache, - ) - cache = None - # We want to _only_ cache responses on securely fetched origins. 
We do # this because we can't validate the response of an insecurely fetched # origin, and we don't want someone to be able to poison the cache and @@ -360,22 +348,13 @@ def is_secure_origin(self, location): continue try: - # We need to do this decode dance to ensure that we have a - # unicode object, even on Python 2.x. addr = ipaddress.ip_address( - origin_host - if ( - isinstance(origin_host, six.text_type) or - origin_host is None - ) - else origin_host.decode("utf8") + None + if origin_host is None + else six.ensure_text(origin_host) ) network = ipaddress.ip_network( - secure_host - if isinstance(secure_host, six.text_type) - # setting secure_host to proper Union[bytes, str] - # creates problems in other places - else secure_host.decode("utf8") # type: ignore + six.ensure_text(secure_host) ) except ValueError: # We don't have both a valid address or a valid network, so diff --git a/pipenv/patched/notpip/_internal/network/utils.py b/pipenv/patched/notpip/_internal/network/utils.py new file mode 100644 index 0000000000..463f6c55e5 --- /dev/null +++ b/pipenv/patched/notpip/_internal/network/utils.py @@ -0,0 +1,48 @@ +from pipenv.patched.notpip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator + + +def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): + # type: (Response, int) -> Iterator[bytes] + """Given a requests Response, provide the data chunks. + """ + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. 
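# Sketch of a caller consuming the response_chunks() helper being defined
# here, hashing a stream while saving it. This is the reason
# decode_content=False matters above: the digest must cover the exact
# bytes the server sent. The function name is hypothetical.
import hashlib

from pipenv.patched.notpip._internal.network.utils import response_chunks

def save_and_hash(resp, path):  # resp: a streamed requests Response
    h = hashlib.sha256()
    with open(path, "wb") as f:
        for chunk in response_chunks(resp, chunk_size=4096):
            h.update(chunk)
            f.write(chunk)
    return h.hexdigest()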
+ while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/pipenv/patched/notpip/_internal/operations/check.py b/pipenv/patched/notpip/_internal/operations/check.py index 9f2fb18717..01986fda4e 100644 --- a/pipenv/patched/notpip/_internal/operations/check.py +++ b/pipenv/patched/notpip/_internal/operations/check.py @@ -53,7 +53,7 @@ def create_package_set_from_installed(**kwargs): package_set[name] = PackageDetails(dist.version, dist.requires()) except RequirementParseError as e: # Don't crash on broken metadata - logging.warning("Error parsing requirements for %s: %s", name, e) + logger.warning("Error parsing requirements for %s: %s", name, e) problems = True return package_set, problems diff --git a/pipenv/patched/notpip/_internal/operations/freeze.py b/pipenv/patched/notpip/_internal/operations/freeze.py index 0fe5399f96..4df4de2f6f 100644 --- a/pipenv/patched/notpip/_internal/operations/freeze.py +++ b/pipenv/patched/notpip/_internal/operations/freeze.py @@ -80,7 +80,7 @@ def freeze( continue if exclude_editable and req.editable: continue - installations[req.name] = req + installations[req.canonical_name] = req if requirement: # the options that don't get turned into an InstallRequirement @@ -139,22 +139,27 @@ def freeze( " (add #egg=PackageName to the URL to avoid" " this warning)" ) - elif line_req.name not in installations: - # either it's not installed, or it is installed - # but has been processed already - if not req_files[line_req.name]: - logger.warning( - "Requirement file [%s] contains %s, but " - "package %r is not installed", - req_file_path, - COMMENT_RE.sub('', line).strip(), line_req.name - ) + else: + line_req_canonical_name = canonicalize_name( + line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + line_req.name + ) + else: + req_files[line_req.name].append(req_file_path) else: + yield str(installations[ + line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] req_files[line_req.name].append(req_file_path) - else: - yield str(installations[line_req.name]).rstrip() - del installations[line_req.name] - req_files[line_req.name].append(req_file_path) # Warn about requirements that were included multiple times (in a # single requirements file or in different requirements files). @@ -169,7 +174,7 @@ def freeze( ) for installation in sorted( installations.values(), key=lambda x: x.name.lower()): - if canonicalize_name(installation.name) not in skip: + if installation.canonical_name not in skip: yield str(installation).rstrip() @@ -239,6 +244,7 @@ class FrozenRequirement(object): def __init__(self, name, req, editable, comments=()): # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None self.name = name + self.canonical_name = canonicalize_name(name) self.req = req self.editable = editable self.comments = comments diff --git a/pipenv/patched/notpip/_internal/operations/install/__init__.py b/pipenv/patched/notpip/_internal/operations/install/__init__.py new file mode 100644 index 0000000000..24d6a5dd31 --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. 
+""" diff --git a/pipenv/patched/notpip/_internal/operations/install/editable_legacy.py b/pipenv/patched/notpip/_internal/operations/install/editable_legacy.py new file mode 100644 index 0000000000..9b054a2071 --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,52 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. +""" +import logging + +from pipenv.patched.notpip._internal.utils.logging import indent_log +from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_develop_args +from pipenv.patched.notpip._internal.utils.subprocess import call_subprocess +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pipenv.patched.notpip._internal.build_env import BuildEnvironment + + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options, # type: List[str] + global_options, # type: Sequence[str] + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + name, # type: str + setup_py_path, # type: str + isolated, # type: bool + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str +): + # type: (...) -> None + """Install a package in editable mode. Most arguments are pass-through + to setuptools. + """ + logger.info('Running setup.py develop for %s', name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + cwd=unpacked_source_directory, + ) diff --git a/pipenv/patched/notpip/_internal/operations/install/legacy.py b/pipenv/patched/notpip/_internal/operations/install/legacy.py new file mode 100644 index 0000000000..d38092a0c9 --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/install/legacy.py @@ -0,0 +1,129 @@ +"""Legacy installation process, i.e. `setup.py install`. +""" + +import logging +import os +from distutils.util import change_root + +from pipenv.patched.notpip._internal.utils.deprecation import deprecated +from pipenv.patched.notpip._internal.utils.logging import indent_log +from pipenv.patched.notpip._internal.utils.misc import ensure_dir +from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_install_args +from pipenv.patched.notpip._internal.utils.subprocess import runner_with_spinner_message +from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pipenv.patched.notpip._internal.models.scheme import Scheme + from pipenv.patched.notpip._internal.req.req_install import InstallRequirement + + +logger = logging.getLogger(__name__) + + +def install( + install_req, # type: InstallRequirement + install_options, # type: List[str] + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + use_user_site, # type: bool + pycompile, # type: bool + scheme, # type: Scheme +): + # type: (...) -> None + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. 
+ # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. + global_options = list(global_options) + \ + install_req.options.get('global_options', []) + install_options = list(install_options) + \ + install_req.options.get('install_options', []) + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + record_filename = os.path.join(temp_dir.path, 'install-record.txt') + install_args = make_setuptools_install_args( + install_req.setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=install_req.isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + "Running setup.py install for {}".format(install_req.name) + ) + with indent_log(), install_req.build_env: + runner( + cmd=install_args, + cwd=install_req.unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + return + install_req.install_succeeded = True + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. + with open(record_filename) as f: + record_lines = f.read().splitlines() + + def prepend_root(path): + # type: (str) -> str + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + deprecated( + reason=( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." + ).format(install_req), + replacement=( + "for maintainers: updating the setup.py of {0}. " + "For users: contact the maintainers of {0} to let " + "them know to update their setup.py.".format( + install_req.name + ) + ), + gone_in="20.2", + issue=6998, + ) + # FIXME: put the record somewhere + return + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath(prepend_root(filename), egg_info_dir) + ) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') diff --git a/pipenv/patched/notpip/_internal/operations/install/wheel.py b/pipenv/patched/notpip/_internal/operations/install/wheel.py new file mode 100644 index 0000000000..91b6b4f10f --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/install/wheel.py @@ -0,0 +1,615 @@ +"""Support for installing and building the "wheel" binary package format. +""" + +# The following comment should be removed at some point in the future. 
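# Worked example of the prepend_root() helper in the legacy install()
# above (paths hypothetical): absolute RECORD entries are re-rooted with
# distutils' change_root, while relative entries pass through untouched.
from distutils.util import change_root

print(change_root("/alt", "/usr/lib/python3/site-packages/foo.py"))
# -> /alt/usr/lib/python3/site-packages/foo.py (on POSIX)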
+# mypy: strict-optional=False + +from __future__ import absolute_import + +import collections +import compileall +import csv +import logging +import os.path +import re +import shutil +import stat +import sys +import warnings +from base64 import urlsafe_b64encode +from zipfile import ZipFile + +from pipenv.patched.notpip._vendor import pkg_resources +from pipenv.patched.notpip._vendor.distlib.scripts import ScriptMaker +from pipenv.patched.notpip._vendor.distlib.util import get_export_entry +from pipenv.patched.notpip._vendor.six import StringIO + +from pipenv.patched.notpip._internal.exceptions import InstallationError +from pipenv.patched.notpip._internal.locations import get_major_minor_version +from pipenv.patched.notpip._internal.utils.misc import captured_stdout, ensure_dir, hash_file +from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.unpacking import unpack_file +from pipenv.patched.notpip._internal.utils.wheel import parse_wheel + +if MYPY_CHECK_RUNNING: + from email.message import Message + from typing import ( + Dict, List, Optional, Sequence, Tuple, IO, Text, Any, + Iterable, Callable, Set, + ) + + from pipenv.patched.notpip._internal.models.scheme import Scheme + + InstalledCSVRow = Tuple[str, ...] + + +logger = logging.getLogger(__name__) + + +def normpath(src, p): + # type: (str, str) -> str + return os.path.relpath(src, p).replace(os.path.sep, '/') + + +def rehash(path, blocksize=1 << 20): + # type: (str, int) -> Tuple[str, str] + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + # unicode/str python2 issues + return (digest, str(length)) # type: ignore + + +def open_for_csv(name, mode): + # type: (str, Text) -> IO[Any] + if sys.version_info[0] < 3: + nl = {} # type: Dict[str, Any] + bin = 'b' + else: + nl = {'newline': ''} # type: Dict[str, Any] + bin = '' + return open(name, mode + bin, **nl) + + +def fix_script(path): + # type: (str) -> Optional[bool] + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + if os.path.isfile(path): + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + return None + + +def wheel_root_is_purelib(metadata): + # type: (Message) -> bool + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(filename): + # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] + if not os.path.exists(filename): + return {}, {} + + # This is done because you can pass a string to entry_points wrappers which + # means that they may or may not be valid INI files. The attempt here is to + # strip leading and trailing whitespace in order to make them valid INI + # files. 
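+    # Illustrative sketch, with a hypothetical project: an entry_points.txt
+    # containing
+    #   [console_scripts]
+    #   spam = spam.cli:main
+    # yields console == {'spam': 'spam.cli:main'} from the parsing below.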
+ with open(filename) as fp: + data = StringIO() + for line in fp: + data.write(line.strip()) + data.write("\n") + data.seek(0) + + # get the entry points and then the script names + entry_points = pkg_resources.EntryPoint.parse_map(data) + console = entry_points.get('console_scripts', {}) + gui = entry_points.get('gui_scripts', {}) + + def _split_ep(s): + # type: (pkg_resources.EntryPoint) -> Tuple[str, str] + """get the string representation of EntryPoint, + remove space and split on '=' + """ + split_parts = str(s).replace(" ", "").split("=") + return split_parts[0], split_parts[1] + + # convert the EntryPoint objects into strings with module:function + console = dict(_split_ep(v) for v in console.values()) + gui = dict(_split_ep(v) for v in gui.values()) + return console, gui + + +def message_about_scripts_not_on_PATH(scripts): + # type: (Sequence[str]) -> Optional[str] + """Determine if any scripts are not on PATH and format a warning. + Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for = { + parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } # type: Dict[str, Set[str]] + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts = sorted(dir_scripts) # type: List[str] + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH." + .format(start_text, parent_dir) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def sorted_outrows(outrows): + # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] + """Return the given rows of a RECORD file in sorted order. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. 
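+
+    For example, the hypothetical rows ('pkg/mod.py', 'sha256=abc', 1234)
+    and ('pkg/mod.py', 'sha256=abc', '1234') sort identically, because every
+    element is stringified before comparison.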
+ """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. + # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) + + +def get_csv_rows_for_installed( + old_csv_rows, # type: Iterable[List[str]] + installed, # type: Dict[str, str] + changed, # type: Set[str] + generated, # type: List[str] + lib_dir, # type: str +): + # type: (...) -> List[InstalledCSVRow] + """ + :param installed: A map from archive RECORD path to installation RECORD + path. + """ + installed_rows = [] # type: List[InstalledCSVRow] + for row in old_csv_rows: + if len(row) > 3: + logger.warning( + 'RECORD line has more than three elements: {}'.format(row) + ) + # Make a copy because we are mutating the row. + row = list(row) + old_path = row[0] + new_path = installed.pop(old_path, old_path) + row[0] = new_path + if new_path in changed: + digest, length = rehash(new_path) + row[1] = digest + row[2] = length + installed_rows.append(tuple(row)) + for f in generated: + digest, length = rehash(f) + installed_rows.append((normpath(f, lib_dir), digest, str(length))) + for f in installed: + installed_rows.append((installed[f], '', '')) + return installed_rows + + +class MissingCallableSuffix(Exception): + pass + + +def _raise_for_invalid_entrypoint(specification): + # type: (str) -> None + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + def make(self, specification, options=None): + # type: (str, Dict[str, Any]) -> List[str] + _raise_for_invalid_entrypoint(specification) + return super(PipScriptMaker, self).make(specification, options) + + +def install_unpacked_wheel( + name, # type: str + wheeldir, # type: str + wheel_zip, # type: ZipFile + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True # type: bool +): + # type: (...) -> None + """Install a wheel. + + :param name: Name of the project to install + :param wheeldir: Base directory of the unpacked wheel + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + # TODO: Investigate and break this up. + # TODO: Look into moving this into a dedicated class for representing an + # installation. 
+ + source = wheeldir.rstrip(os.path.sep) + os.path.sep + + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + subdirs = os.listdir(source) + data_dirs = [s for s in subdirs if s.endswith('.data')] + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} # type: Dict[str, str] + changed = set() + generated = [] # type: List[str] + + # Compile all of the pyc files that we're going to be installing + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + compileall.compile_dir(source, force=True, quiet=True) + logger.debug(stdout.getvalue()) + + def record_installed(srcfile, destfile, modified=False): + # type: (str, str, bool) -> None + """Map archive RECORD paths to installation RECORD paths.""" + oldpath = normpath(srcfile, wheeldir) + newpath = normpath(destfile, lib_dir) + installed[oldpath] = newpath + if modified: + changed.add(destfile) + + def clobber( + source, # type: str + dest, # type: str + is_base, # type: bool + fixer=None, # type: Optional[Callable[[str], Any]] + filter=None # type: Optional[Callable[[str], bool]] + ): + # type: (...) -> None + ensure_dir(dest) # common for the 'include' path + + for dir, subdirs, files in os.walk(source): + basedir = dir[len(source):].lstrip(os.path.sep) + destdir = os.path.join(dest, basedir) + if is_base and basedir == '': + subdirs[:] = [s for s in subdirs if not s.endswith('.data')] + for f in files: + # Skip unwanted files + if filter and filter(f): + continue + srcfile = os.path.join(dir, f) + destfile = os.path.join(dest, basedir, f) + # directory creation is lazy and after the file filtering above + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + ensure_dir(destdir) + + # copyfile (called below) truncates the destination if it + # exists and then writes the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(destfile): + os.unlink(destfile) + + # We use copyfile (not move, copy, or copy2) to be extra sure + # that we are not moving directories over (copyfile fails for + # directories) as well as to ensure that we are not copying + # over any metadata because we want more control over what + # metadata we actually copy over. + shutil.copyfile(srcfile, destfile) + + # Copy over the metadata for the file, currently this only + # includes the atime and mtime. + st = os.stat(srcfile) + if hasattr(os, "utime"): + os.utime(destfile, (st.st_atime, st.st_mtime)) + + # If our file is executable, then make our destination file + # executable. 
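+                # (The user, group and other execute bits are all added on
+                # top of the source file's existing mode below.)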
+ if os.access(srcfile, os.X_OK): + st = os.stat(srcfile) + permissions = ( + st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + ) + os.chmod(destfile, permissions) + + changed = False + if fixer: + changed = fixer(destfile) + record_installed(srcfile, destfile, changed) + + clobber(source, lib_dir, True) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Get the defined entry points + ep_file = os.path.join(dest_info_dir, 'entry_points.txt') + console, gui = get_entrypoints(ep_file) + + def is_entrypoint_wrapper(name): + # type: (str) -> bool + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + for datadir in data_dirs: + fixer = None + filter = None + for subdir in os.listdir(os.path.join(wheeldir, datadir)): + fixer = None + if subdir == 'scripts': + fixer = fix_script + filter = is_entrypoint_wrapper + source = os.path.join(wheeldir, datadir, subdir) + dest = getattr(scheme, subdir) + clobber(source, dest, False, fixer=fixer, filter=filter) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {''} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. 
Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append('pip = ' + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + 'pip%s = %s' % (sys.version_info[0], pip_script) + ) + + scripts_to_generate.append( + 'pip%s = %s' % (get_major_minor_version(), pip_script) + ) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append( + 'easy_install = ' + easy_install_script + ) + + scripts_to_generate.append( + 'easy_install-%s = %s' % ( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate.extend( + '%s = %s' % kv for kv in console.items() + ) + + gui_scripts_to_generate = [ + '%s = %s' % kv for kv in gui.items() + ] + + generated_console_scripts = [] # type: List[str] + + try: + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend( + maker.make_multiple(gui_scripts_to_generate, {'gui': True}) + ) + except MissingCallableSuffix as e: + entry = e.args[0] + raise InstallationError( + "Invalid script entry point: {} for req: {} - A callable " + "suffix is required. Cf https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information.".format(entry, req_description) + ) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + # Record pip as the installer + installer = os.path.join(dest_info_dir, 'INSTALLER') + temp_installer = os.path.join(dest_info_dir, 'INSTALLER.pip') + with open(temp_installer, 'wb') as installer_file: + installer_file.write(b'pip\n') + shutil.move(temp_installer, installer) + generated.append(installer) + + # Record details of all files installed + record = os.path.join(dest_info_dir, 'RECORD') + temp_record = os.path.join(dest_info_dir, 'RECORD.pip') + with open_for_csv(record, 'r') as record_in: + with open_for_csv(temp_record, 'w+') as record_out: + reader = csv.reader(record_in) + outrows = get_csv_rows_for_installed( + reader, installed=installed, changed=changed, + generated=generated, lib_dir=lib_dir, + ) + writer = csv.writer(record_out) + # Sort to simplify testing. + for row in sorted_outrows(outrows): + writer.writerow(row) + shutil.move(temp_record, record) + + +def install_wheel( + name, # type: str + wheel_path, # type: str + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True, # type: bool + _temp_dir_for_testing=None, # type: Optional[str] +): + # type: (...) 
-> None + with TempDirectory( + path=_temp_dir_for_testing, kind="unpacked-wheel" + ) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z: + unpack_file(wheel_path, unpacked_dir.path) + install_unpacked_wheel( + name=name, + wheeldir=unpacked_dir.path, + wheel_zip=z, + scheme=scheme, + req_description=req_description, + pycompile=pycompile, + warn_script_location=warn_script_location, + ) diff --git a/pipenv/patched/notpip/_internal/operations/prepare.py b/pipenv/patched/notpip/_internal/operations/prepare.py index 6128f9b965..29ab93b7f6 100644 --- a/pipenv/patched/notpip/_internal/operations/prepare.py +++ b/pipenv/patched/notpip/_internal/operations/prepare.py @@ -3,45 +3,91 @@ # The following comment should be removed at some point in the future. # mypy: strict-optional=False -# mypy: disallow-untyped-defs=False import logging +import mimetypes import os +import shutil +import sys from pipenv.patched.notpip._vendor import requests +from pipenv.patched.notpip._vendor.six import PY2 from pipenv.patched.notpip._internal.distributions import ( make_distribution_for_install_requirement, ) from pipenv.patched.notpip._internal.distributions.installed import InstalledDistribution -from pipenv.patched.notpip._internal.download import unpack_url from pipenv.patched.notpip._internal.exceptions import ( DirectoryUrlHashUnsupported, + HashMismatch, HashUnpinned, InstallationError, PreviousBuildDirError, VcsHashUnsupported, ) -from pipenv.patched.notpip._internal.utils.compat import expanduser +from pipenv.patched.notpip._internal.utils.filesystem import copy2_fixed from pipenv.patched.notpip._internal.utils.hashes import MissingHashes from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.marker_files import write_delete_marker_file -from pipenv.patched.notpip._internal.utils.misc import display_path, normalize_path +from pipenv.patched.notpip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + hide_url, + path_to_display, + rmtree, +) +from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.unpacking import unpack_file +from pipenv.patched.notpip._internal.vcs import vcs if MYPY_CHECK_RUNNING: - from typing import Optional + from typing import ( + Callable, List, Optional, Tuple, + ) + + from mypy_extensions import TypedDict from pipenv.patched.notpip._internal.distributions import AbstractDistribution - from pipenv.patched.notpip._internal.index import PackageFinder - from pipenv.patched.notpip._internal.network.session import PipSession + from pipenv.patched.notpip._internal.index.package_finder import PackageFinder + from pipenv.patched.notpip._internal.models.link import Link + from pipenv.patched.notpip._internal.network.download import Downloader from pipenv.patched.notpip._internal.req.req_install import InstallRequirement from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker + from pipenv.patched.notpip._internal.utils.hashes import Hashes + + if PY2: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'ignore': Callable[[str, List[str]], List[str]], + 'symlinks': bool, + }, + total=False, + ) + else: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'copy_function': Callable[[str, str], None], + 'ignore': Callable[[str, List[str]], List[str]], + 'ignore_dangling_symlinks': bool, + 'symlinks': bool, + }, + total=False, + ) logger 
= logging.getLogger(__name__) -def _get_prepared_distribution(req, req_tracker, finder, build_isolation): +def _get_prepared_distribution( + req, # type: InstallRequirement + req_tracker, # type: RequirementTracker + finder, # type: PackageFinder + build_isolation # type: bool +): + # type: (...) -> AbstractDistribution """Prepare a distribution for installation. """ abstract_dist = make_distribution_for_install_requirement(req) @@ -50,6 +96,245 @@ def _get_prepared_distribution(req, req_tracker, finder, build_isolation): return abstract_dist +def unpack_vcs_link(link, location): + # type: (Link, str) -> None + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url)) + + +def _copy_file(filename, location, link): + # type: (str, str, Link) -> None + copy = True + download_location = os.path.join(location, link.filename) + if os.path.exists(download_location): + response = ask_path_exists( + 'The file {} exists. (i)gnore, (w)ipe, (b)ackup, (a)bort'.format( + display_path(download_location) + ), + ('i', 'w', 'b', 'a'), + ) + if response == 'i': + copy = False + elif response == 'w': + logger.warning('Deleting %s', display_path(download_location)) + os.remove(download_location) + elif response == 'b': + dest_file = backup_dir(download_location) + logger.warning( + 'Backing up %s to %s', + display_path(download_location), + display_path(dest_file), + ) + shutil.move(download_location, dest_file) + elif response == 'a': + sys.exit(-1) + if copy: + shutil.copy(filename, download_location) + logger.info('Saved %s', display_path(download_location)) + + +def unpack_http_url( + link, # type: Link + location, # type: str + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> str + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = mimetypes.guess_type(from_path)[0] + else: + # let's download to a tmp dir + from_path, content_type = _download_http_url( + link, downloader, temp_dir.path, hashes + ) + + # unpack the archive to the build dir location. even when only + # downloading archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type) + + return from_path + + +def _copy2_ignoring_special_files(src, dest): + # type: (str, str) -> None + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source.
+ logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + path_to_display(src), + path_to_display(dest), + ) + + +def _copy_source_tree(source, target): + # type: (str, str) -> None + def ignore(d, names): + # type: (str, List[str]) -> List[str] + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). + # See discussion at https://github.com/pypa/pip/pull/6770 + return ['.tox', '.nox'] if d == source else [] + + kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs + + if not PY2: + # Python 2 does not support copy_function, so we only ignore + # errors on special file copy in Python 3. + kwargs['copy_function'] = _copy2_ignoring_special_files + + shutil.copytree(source, target, **kwargs) + + +def unpack_file_url( + link, # type: Link + location, # type: str + download_dir=None, # type: Optional[str] + hashes=None # type: Optional[Hashes] +): + # type: (...) -> Optional[str] + """Unpack link into location. + """ + link_path = link.file_path + # If it's a url to a local directory + if link.is_existing_dir(): + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link_path, location) + return None + + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + + content_type = mimetypes.guess_type(from_path)[0] + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type) + + return from_path + + +def unpack_url( + link, # type: Link + location, # type: str + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> Optional[str] + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location) + return None + + # file urls + elif link.is_file: + return unpack_file_url(link, location, download_dir, hashes=hashes) + + # http urls + else: + return unpack_http_url( + link, + location, + downloader, + download_dir, + hashes=hashes, + ) + + +def _download_http_url( + link, # type: Link + downloader, # type: Downloader + temp_dir, # type: str + hashes, # type: Optional[Hashes] +): + # type: (...) 
-> Tuple[str, str] + """Download link url into temp_dir using the provided downloader""" + download = downloader(link) + + file_path = os.path.join(temp_dir, download.filename) + with open(file_path, 'wb') as content_file: + for chunk in download.chunks: + content_file.write(chunk) + + if hashes: + hashes.check_against_path(file_path) + + return file_path, download.response.headers.get('content-type', '') + + +def _check_download_dir(link, download_dir, hashes): + # type: (Link, str, Optional[Hashes]) -> Optional[str] + """ Check download_dir for a previously downloaded file with correct hash. + If a correct file is found, return its path; otherwise return None. + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. ' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path + + class RequirementPreparer(object): """Prepares a Requirement """ @@ -60,9 +345,12 @@ def __init__( download_dir, # type: Optional[str] src_dir, # type: str wheel_download_dir, # type: Optional[str] - progress_bar, # type: str build_isolation, # type: bool - req_tracker # type: RequirementTracker + req_tracker, # type: RequirementTracker + downloader, # type: Downloader + finder, # type: PackageFinder + require_hashes, # type: bool + use_user_site, # type: bool ): # type: (...) -> None super(RequirementPreparer, self).__init__() @@ -70,18 +358,16 @@ def __init__( self.src_dir = src_dir self.build_dir = build_dir self.req_tracker = req_tracker + self.downloader = downloader + self.finder = finder # Where still-packed archives should be written to. If None, they are # not saved, and are deleted immediately after unpacking. - if download_dir: - download_dir = expanduser(download_dir) self.download_dir = download_dir # Where still-packed .whl files should be written to. If None, they are # written to the download_dir parameter. Separate to download_dir to # permit only keeping wheel archives for pip wheel. - if wheel_download_dir: - wheel_download_dir = normalize_path(wheel_download_dir) self.wheel_download_dir = wheel_download_dir # NOTE @@ -89,11 +375,15 @@ def __init__( # be combined if we're willing to have non-wheel archives present in # the wheelhouse output by 'pip wheel'. - self.progress_bar = progress_bar - # Is build isolation allowed? self.build_isolation = build_isolation + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + @property def _download_should_save(self): # type: () -> bool @@ -105,15 +395,12 @@ def _download_should_save(self): logger.critical('Could not find download directory') raise InstallationError( - "Could not find or access download directory '%s'" - % display_path(self.download_dir)) + "Could not find or access download directory '{}'" + .format(self.download_dir)) def prepare_linked_requirement( self, req, # type: InstallRequirement - session, # type: PipSession - finder, # type: PackageFinder - require_hashes, # type: bool ): # type: (...)
-> AbstractDistribution """Prepare a requirement that would be obtained from req.link @@ -133,6 +420,8 @@ def prepare_linked_requirement( # editable in a req, a non deterministic error # occurs when the script attempts to unpack the # build directory + # Since source_dir is only set for editable requirements. + assert req.source_dir is None req.ensure_has_source_dir(self.build_dir) # If a checkout exists, it's unwise to keep going. version # inconsistencies are logged later, but do not fail the @@ -146,7 +435,7 @@ def prepare_linked_requirement( # requirements we have and raise some more informative errors # than otherwise. (For example, we can raise VcsHashUnsupported # for a VCS URL rather than HashMissing.) - if require_hashes: + if self.require_hashes: # We could check these first 2 conditions inside # unpack_url and save repetition of conditions, but then # we would report less-useful error messages for @@ -166,8 +455,8 @@ def prepare_linked_requirement( # about them not being pinned. raise HashUnpinned() - hashes = req.hashes(trust_internet=not require_hashes) - if require_hashes and not hashes: + hashes = req.hashes(trust_internet=not self.require_hashes) + if self.require_hashes and not hashes: # Known-good hashes are missing for this requirement, so # shim it with a facade object that will provoke hash # computation and then raise a HashMissing exception @@ -181,10 +470,9 @@ def prepare_linked_requirement( download_dir = self.wheel_download_dir try: - unpack_url( - link, req.source_dir, download_dir, - session=session, hashes=hashes, - progress_bar=self.progress_bar + local_path = unpack_url( + link, req.source_dir, self.downloader, download_dir, + hashes=hashes, ) except requests.HTTPError as exc: logger.critical( @@ -193,11 +481,15 @@ def prepare_linked_requirement( exc, ) raise InstallationError( - 'Could not install requirement %s because of HTTP ' - 'error %s for URL %s' % - (req, exc, link) + 'Could not install requirement {} because of HTTP ' + 'error {} for URL {}'.format(req, exc, link) ) + # For use in later processing, preserve the file path on the + # requirement. + if local_path: + req.local_file_path = local_path + if link.is_wheel: if download_dir: # When downloading, we only unpack wheels to get @@ -214,9 +506,17 @@ def prepare_linked_requirement( write_delete_marker_file(req.source_dir) abstract_dist = _get_prepared_distribution( - req, self.req_tracker, finder, self.build_isolation, + req, self.req_tracker, self.finder, self.build_isolation, ) + if download_dir: + if link.is_existing_dir(): + logger.info('Link is a directory, ignoring download_dir') + elif local_path and not os.path.exists( + os.path.join(download_dir, link.filename) + ): + _copy_file(local_path, download_dir, link) + if self._download_should_save: # Make a .zip of the source_dir we already created. if link.is_vcs: @@ -226,9 +526,6 @@ def prepare_linked_requirement( def prepare_editable_requirement( self, req, # type: InstallRequirement - require_hashes, # type: bool - use_user_site, # type: bool - finder # type: PackageFinder ): # type: (...) -> AbstractDistribution """Prepare an editable requirement @@ -238,29 +535,28 @@ def prepare_editable_requirement( logger.info('Obtaining %s', req) with indent_log(): - if require_hashes: + if self.require_hashes: raise InstallationError( - 'The editable requirement %s cannot be installed when ' + 'The editable requirement {} cannot be installed when ' 'requiring hashes, because there is no single file to ' - 'hash.' 
% req + 'hash.'.format(req) ) req.ensure_has_source_dir(self.src_dir) req.update_editable(not self._download_should_save) abstract_dist = _get_prepared_distribution( - req, self.req_tracker, finder, self.build_isolation, + req, self.req_tracker, self.finder, self.build_isolation, ) if self._download_should_save: req.archive(self.download_dir) - req.check_if_exists(use_user_site) + req.check_if_exists(self.use_user_site) return abstract_dist def prepare_installed_requirement( self, req, # type: InstallRequirement - require_hashes, # type: bool skip_reason # type: str ): # type: (...) -> AbstractDistribution @@ -269,14 +565,14 @@ def prepare_installed_requirement( assert req.satisfied_by, "req should have been satisfied but isn't" assert skip_reason is not None, ( "did not get skip reason skipped but req.satisfied_by " - "is set to %r" % (req.satisfied_by,) + "is set to {}".format(req.satisfied_by) ) logger.info( 'Requirement %s: %s (%s)', skip_reason, req, req.satisfied_by.version ) with indent_log(): - if require_hashes: + if self.require_hashes: logger.debug( 'Since it is already installed, we are trusting this ' 'package without checking its hash. To ensure a ' diff --git a/pipenv/patched/notpip/_internal/pep425tags.py b/pipenv/patched/notpip/_internal/pep425tags.py index 16d041d95d..49943e2c03 100644 --- a/pipenv/patched/notpip/_internal/pep425tags.py +++ b/pipenv/patched/notpip/_internal/pep425tags.py @@ -1,345 +1,121 @@ """Generate and work with PEP 425 Compatibility Tags.""" from __future__ import absolute_import -import distutils.util import logging -import os -import platform import re -import sys -import sysconfig -import warnings -from collections import OrderedDict -import pipenv.patched.notpip._internal.utils.glibc -from pipenv.patched.notpip._internal.utils.compat import get_extension_suffixes +from pipenv.patched.notpip._vendor.packaging.tags import ( + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import ( - Tuple, Callable, List, Optional, Union, Dict, Set - ) + from typing import List, Optional, Tuple - Pep425Tag = Tuple[str, str, str] + from pipenv.patched.notpip._vendor.packaging.tags import PythonVersion logger = logging.getLogger(__name__) _osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') -def get_config_var(var): - # type: (str) -> Optional[str] - try: - return sysconfig.get_config_var(var) - except IOError as e: # Issue #1074 - warnings.warn("{}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - # type: () -> str - """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl - - def version_info_to_nodot(version_info): # type: (Tuple[int, ...]) -> str # Only use up to the first two numbers. return ''.join(map(str, version_info[:2])) -def get_impl_ver(): - # type: () -> str - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - # type: () -> Tuple[int, ...] 
- """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - # attrs exist only on pypy - return (sys.version_info[0], - sys.pypy_version_info.major, # type: ignore - sys.pypy_version_info.minor) # type: ignore +def _mac_platforms(arch): + # type: (str) -> List[str] + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + '{}_{}'.format(name, arch[len('macosx_'):]) + for arch in mac_platforms(mac_version, actual_arch) + ] else: - return sys.version_info[0], sys.version_info[1] - - -def get_impl_tag(): - # type: () -> str - """ - Returns the Tag for this specific implementation. - """ - return "{}{}".format(get_abbr_impl(), get_impl_ver()) - - -def get_flag(var, fallback, expected=True, warn=True): - # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - logger.debug("Config variable '%s' is unset, Python ABI tag may " - "be incorrect", var) - return fallback() - return val == expected - - -def get_abi_tag(): - # type: () -> Optional[str] - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - abi = None # type: Optional[str] - - if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - is_cpython = (impl == 'cp') - if get_flag( - 'Py_DEBUG', lambda: hasattr(sys, 'gettotalrefcount'), - warn=is_cpython): - d = 'd' - if sys.version_info < (3, 8) and get_flag( - 'WITH_PYMALLOC', lambda: is_cpython, warn=is_cpython): - m = 'm' - if sys.version_info < (3, 3) and get_flag( - 'Py_UNICODE_SIZE', lambda: sys.maxunicode == 0x10ffff, - expected=4, warn=is_cpython): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - - return abi - - -def _is_running_32bit(): - # type: () -> bool - return sys.maxsize == 2147483647 - - -def get_platform(): - # type: () -> str - """Return our platform name 'win32', 'linux_x86_64'""" - if sys.platform == 'darwin': - # distutils.util.get_platform() returns the release based on the value - # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may - # be significantly older than the user's current machine. - release, _, machine = platform.mac_ver() - split_ver = release.split('.') - - if machine == "x86_64" and _is_running_32bit(): - machine = "i386" - elif machine == "ppc64" and _is_running_32bit(): - machine = "ppc" - - return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) + # arch pattern didn't match (?!) 
+ arches = [arch] + return arches - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and _is_running_32bit(): - # 32 bit Python program (running on a 64 bit Linux): pip should only - # install and run 32 bit compiled extensions in that case. - result = "linux_i686" - return result +def _custom_manylinux_platforms(arch): + # type: (str) -> List[str] + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch_prefix == 'manylinux2014': + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {'i686', 'x86_64'}: + arches.append('manylinux2010' + arch_sep + arch_suffix) + arches.append('manylinux1' + arch_sep + arch_suffix) + elif arch_prefix == 'manylinux2010': + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append('manylinux1' + arch_sep + arch_suffix) + return arches -def is_linux_armhf(): - # type: () -> bool - if get_platform() != "linux_armv7l": - return False - # hard-float ABI can be detected from the ELF header of the running - # process - sys_executable = os.environ.get('PIP_PYTHON_PATH', sys.executable) - try: - with open(sys_executable, 'rb') as f: - elf_header_raw = f.read(40) # read 40 first bytes of ELF header - except (IOError, OSError, TypeError): - return False - if elf_header_raw is None or len(elf_header_raw) < 40: - return False - if isinstance(elf_header_raw, str): - elf_header = [ord(c) for c in elf_header_raw] +def _get_custom_platforms(arch): + # type: (str) -> List[str] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch.startswith('macosx'): + arches = _mac_platforms(arch) + elif arch_prefix in ['manylinux2014', 'manylinux2010']: + arches = _custom_manylinux_platforms(arch) else: - elf_header = [b for b in elf_header_raw] - result = elf_header[0:4] == [0x7f, 0x45, 0x4c, 0x46] # ELF magic number - result &= elf_header[4:5] == [1] # 32-bit ELF - result &= elf_header[5:6] == [1] # little-endian - result &= elf_header[18:20] == [0x28, 0] # ARM machine - result &= elf_header[39:40] == [5] # ARM EABIv5 - result &= (elf_header[37:38][0] & 4) == 4 # EF_ARM_ABI_FLOAT_HARD - return result - - -def is_manylinux1_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux1_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 5 uses glibc 2.5. 
- return pipenv.patched.notpip._internal.utils.glibc.have_compatible_glibc(2, 5) - - -def is_manylinux2010_compatible(): - # type: () -> bool - # Only Linux, and only x86-64 / i686 - if get_platform() not in {"linux_x86_64", "linux_i686"}: - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux2010_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 6 uses glibc 2.12. - return pipenv.patched.notpip._internal.utils.glibc.have_compatible_glibc(2, 12) - - -def is_manylinux2014_compatible(): - # type: () -> bool - # Only Linux, and only supported architectures - platform = get_platform() - if platform not in {"linux_x86_64", "linux_i686", "linux_aarch64", - "linux_armv7l", "linux_ppc64", "linux_ppc64le", - "linux_s390x"}: - return False - - # check for hard-float ABI in case we're running linux_armv7l not to - # install hard-float ABI wheel in a soft-float ABI environment - if platform == "linux_armv7l" and not is_linux_armhf(): - return False - - # Check for presence of _manylinux module - try: - import _manylinux - return bool(_manylinux.manylinux2014_compatible) - except (ImportError, AttributeError): - # Fall through to heuristic check below - pass - - # Check glibc version. CentOS 7 uses glibc 2.17. - return pipenv.patched.notpip._internal.utils.glibc.have_compatible_glibc(2, 17) - - -def get_darwin_arches(major, minor, machine): - # type: (int, int, str) -> List[str] - """Return a list of supported arches (including group arches) for - the given major, minor and machine architecture of an macOS machine. - """ - arches = [] - - def _supports_arch(major, minor, arch): - # type: (int, int, str) -> bool - # Looking at the application support for macOS versions in the chart - # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears - # our timeline looks roughly like: - # - # 10.0 - Introduces ppc support. - # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 - # and x86_64 support is CLI only, and cannot be used for GUI - # applications. - # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. - # 10.6 - Drops support for ppc64 - # 10.7 - Drops support for ppc - # - # Given that we do not know if we're installing a CLI or a GUI - # application, we must be conservative and assume it might be a GUI - # application and behave as if ppc64 and x86_64 support did not occur - # until 10.5. - # - # Note: The above information is taken from the "Application support" - # column in the chart not the "Processor support" since I believe - # that we care about what instruction sets an application can use - # not which processors the OS supports. 
- if arch == 'ppc': - return (major, minor) <= (10, 5) - if arch == 'ppc64': - return (major, minor) == (10, 5) - if arch == 'i386': - return (major, minor) >= (10, 4) - if arch == 'x86_64': - return (major, minor) >= (10, 5) - if arch in groups: - for garch in groups[arch]: - if _supports_arch(major, minor, garch): - return True - return False - - groups = OrderedDict([ - ("fat", ("i386", "ppc")), - ("intel", ("x86_64", "i386")), - ("fat64", ("x86_64", "ppc64")), - ("fat32", ("x86_64", "i386", "ppc")), - ]) # type: Dict[str, Tuple[str, ...]] - - if _supports_arch(major, minor, machine): - arches.append(machine) - - for garch in groups: - if machine in groups[garch] and _supports_arch(major, minor, garch): - arches.append(garch) + arches = [arch] + return arches - arches.append('universal') - return arches +def _get_python_version(version): + # type: (str) -> PythonVersion + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) -def get_all_minor_versions_as_strings(version_info): - # type: (Tuple[int, ...]) -> List[str] - versions = [] - major = version_info[:-1] - # Support all previous minor Python versions. - for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - return versions +def _get_custom_interpreter(implementation=None, version=None): + # type: (Optional[str], Optional[str]) -> str + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return "{}{}".format(implementation, version) def get_supported( - versions=None, # type: Optional[List[str]] - noarch=False, # type: bool + version=None, # type: Optional[str] platform=None, # type: Optional[str] impl=None, # type: Optional[str] abi=None # type: Optional[str] ): - # type: (...) -> List[Pep425Tag] + # type: (...) -> List[Tag] """Return a list of supported tags for each version specified in `versions`. - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. :param platform: specify the exact platform you want valid tags for, or None. If None, use the local system platform. :param impl: specify the exact implementation you want valid @@ -347,105 +123,45 @@ def get_supported( :param abi: specify the exact abi you want valid tags for, or None. If None, use the local interpreter abi. 
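+
+    For example (illustrative only), a CPython 3.8 Linux build might report
+    tags such as cp38-cp38-manylinux1_x86_64 near the front, falling back to
+    generic tags like py3-none-any further down the list.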
""" - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - version_info = get_impl_version_info() - versions = get_all_minor_versions_as_strings(version_info) - - impl = impl or get_abbr_impl() - - abis = [] # type: List[str] - - abi = abi or get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() # type: Set[str] - for suffix in get_extension_suffixes(): - if suffix.startswith('.abi'): - abi3s.add(suffix.split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - if not noarch: - arch = platform or get_platform() - arch_prefix, arch_sep, arch_suffix = arch.partition('_') - if arch.startswith('macosx'): - # support macosx-10.6-intel on macosx-10.9-x86_64 - match = _osx_arch_pat.match(arch) - if match: - name, major, minor, actual_arch = match.groups() - tpl = '{}_{}_%i_%s'.format(name, major) - arches = [] - for m in reversed(range(int(minor) + 1)): - for a in get_darwin_arches(int(major), m, actual_arch): - arches.append(tpl % (m, a)) - else: - # arch pattern didn't match (?!) - arches = [arch] - elif arch_prefix == 'manylinux2014': - arches = [arch] - # manylinux1/manylinux2010 wheels run on most manylinux2014 systems - # with the exception of wheels depending on ncurses. PEP 599 states - # manylinux1/manylinux2010 wheels should be considered - # manylinux2014 wheels: - # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels - if arch_suffix in {'i686', 'x86_64'}: - arches.append('manylinux2010' + arch_sep + arch_suffix) - arches.append('manylinux1' + arch_sep + arch_suffix) - elif arch_prefix == 'manylinux2010': - # manylinux1 wheels run on most manylinux2010 systems with the - # exception of wheels depending on ncurses. PEP 571 states - # manylinux1 wheels should be considered manylinux2010 wheels: - # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches = [arch, 'manylinux1' + arch_sep + arch_suffix] - elif platform is None: - arches = [] - if is_manylinux2014_compatible(): - arches.append('manylinux2014' + arch_sep + arch_suffix) - if is_manylinux2010_compatible(): - arches.append('manylinux2010' + arch_sep + arch_suffix) - if is_manylinux1_compatible(): - arches.append('manylinux1' + arch_sep + arch_suffix) - arches.append(arch) - else: - arches = [arch] - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in arches: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in {'31', '30'}: - break - for abi in abi3s: # empty set if not Python 3 - for arch in arches: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # Has binaries, does not use the Python API: - for arch in arches: - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, but requires our implementation: - supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) + supported = [] # type: List[Tag] + + python_version = None # type: Optional[PythonVersion] + 
if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + abis = None # type: Optional[List[str]] + if abi is not None: + abis = [abi] + + platforms = None # type: Optional[List[str]] + if platform is not None: + platforms = _get_custom_platforms(platform) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) return supported - - -implementation_tag = get_impl_tag() diff --git a/pipenv/patched/notpip/_internal/pyproject.py b/pipenv/patched/notpip/_internal/pyproject.py index bef9c37889..7d25df90e8 100644 --- a/pipenv/patched/notpip/_internal/pyproject.py +++ b/pipenv/patched/notpip/_internal/pyproject.py @@ -3,14 +3,16 @@ import io import os import sys +from collections import namedtuple from pipenv.patched.notpip._vendor import pytoml, six +from pipenv.patched.notpip._vendor.packaging.requirements import InvalidRequirement, Requirement from pipenv.patched.notpip._internal.exceptions import InstallationError from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, Tuple, Optional, List + from typing import Any, Optional, List def _is_list_of_str(obj): @@ -32,13 +34,18 @@ def make_pyproject_path(unpacked_source_directory): return path +BuildSystemDetails = namedtuple('BuildSystemDetails', [ + 'requires', 'backend', 'check', 'backend_path' +]) + + def load_pyproject_toml( use_pep517, # type: Optional[bool] pyproject_toml, # type: str setup_py, # type: str req_name # type: str ): - # type: (...) -> Optional[Tuple[List[str], str, List[str]]] + # type: (...) -> Optional[BuildSystemDetails] """Load the pyproject.toml file. Parameters: @@ -56,6 +63,8 @@ def load_pyproject_toml( name of PEP 517 backend, requirements we should check are installed after setting up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. 
) """ has_pyproject = os.path.isfile(pyproject_toml) @@ -150,7 +159,23 @@ def load_pyproject_toml( reason="'build-system.requires' is not a list of strings.", )) + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "'build-system.requires' contains an invalid " + "requirement: {!r}".format(requirement) + ), + ) + ) + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) check = [] # type: List[str] if backend is None: # If the user didn't specify a backend, we assume they want to use @@ -168,4 +193,4 @@ def load_pyproject_toml( backend = "setuptools.build_meta:__legacy__" check = ["setuptools>=40.8.0", "wheel"] - return (requires, backend, check) + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/pipenv/patched/notpip/_internal/req/__init__.py b/pipenv/patched/notpip/_internal/req/__init__.py index 998be6a26a..89aefbe5ff 100644 --- a/pipenv/patched/notpip/_internal/req/__init__.py +++ b/pipenv/patched/notpip/_internal/req/__init__.py @@ -23,6 +23,16 @@ logger = logging.getLogger(__name__) +class InstallationResult(object): + def __init__(self, name): + # type: (str) -> None + self.name = name + + def __repr__(self): + # type: () -> str + return "InstallationResult(name={!r})".format(self.name) + + def install_given_reqs( to_install, # type: List[InstallRequirement] install_options, # type: List[str] @@ -30,7 +40,7 @@ def install_given_reqs( *args, # type: Any **kwargs # type: Any ): - # type: (...) -> List[InstallRequirement] + # type: (...) -> List[InstallationResult] """ Install everything in the given list. @@ -43,13 +53,12 @@ def install_given_reqs( ', '.join([req.name for req in to_install]), ) + installed = [] + with indent_log(): for requirement in to_install: - if requirement.conflicts_with: - logger.info( - 'Found existing installation: %s', - requirement.conflicts_with, - ) + if requirement.should_reinstall: + logger.info('Attempting uninstall: %s', requirement.name) with indent_log(): uninstalled_pathset = requirement.uninstall( auto_confirm=True @@ -63,7 +72,7 @@ def install_given_reqs( ) except Exception: should_rollback = ( - requirement.conflicts_with and + requirement.should_reinstall and not requirement.install_succeeded ) # if install did not succeed, rollback previous uninstall @@ -72,11 +81,12 @@ def install_given_reqs( raise else: should_commit = ( - requirement.conflicts_with and + requirement.should_reinstall and requirement.install_succeeded ) if should_commit: uninstalled_pathset.commit() - requirement.remove_temporary_source() - return to_install + installed.append(InstallationResult(requirement.name)) + + return installed diff --git a/pipenv/patched/notpip/_internal/req/constructors.py b/pipenv/patched/notpip/_internal/req/constructors.py index b1a2abe723..2c2f83a30b 100644 --- a/pipenv/patched/notpip/_internal/req/constructors.py +++ b/pipenv/patched/notpip/_internal/req/constructors.py @@ -10,7 +10,6 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False import logging import os @@ -24,6 +23,7 @@ from pipenv.patched.notpip._internal.exceptions import InstallationError from pipenv.patched.notpip._internal.models.index import PyPI, TestPyPI from pipenv.patched.notpip._internal.models.link import Link +from pipenv.patched.notpip._internal.models.wheel import Wheel from pipenv.patched.notpip._internal.pyproject import make_pyproject_path from pipenv.patched.notpip._internal.req.req_install import InstallRequirement from pipenv.patched.notpip._internal.utils.filetypes import ARCHIVE_EXTENSIONS @@ -31,7 +31,6 @@ from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING from pipenv.patched.notpip._internal.utils.urls import path_to_url from pipenv.patched.notpip._internal.vcs import is_url, vcs -from pipenv.patched.notpip._internal.wheel import Wheel if MYPY_CHECK_RUNNING: from typing import ( @@ -347,6 +346,7 @@ def parse_req_from_line(name, line_source): extras = convert_extras(extras_as_string) def with_source(text): + # type: (str) -> str if not line_source: return text return '{} (from {})'.format(text, line_source) diff --git a/pipenv/patched/notpip/_internal/req/req_file.py b/pipenv/patched/notpip/_internal/req/req_file.py index ece5498662..854c2c34a1 100644 --- a/pipenv/patched/notpip/_internal/req/req_file.py +++ b/pipenv/patched/notpip/_internal/req/req_file.py @@ -17,26 +17,35 @@ from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse from pipenv.patched.notpip._internal.cli import cmdoptions -from pipenv.patched.notpip._internal.download import get_file_content -from pipenv.patched.notpip._internal.exceptions import RequirementsFileParseError +from pipenv.patched.notpip._internal.exceptions import ( + InstallationError, + RequirementsFileParseError, +) from pipenv.patched.notpip._internal.models.search_scope import SearchScope from pipenv.patched.notpip._internal.req.constructors import ( install_req_from_editable, install_req_from_line, ) +from pipenv.patched.notpip._internal.utils.encoding import auto_decode from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.urls import get_url_scheme if MYPY_CHECK_RUNNING: + from optparse import Values from typing import ( Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple, ) + from pipenv.patched.notpip._internal.req import InstallRequirement from pipenv.patched.notpip._internal.cache import WheelCache - from pipenv.patched.notpip._internal.index import PackageFinder + from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._internal.network.session import PipSession ReqFileLines = Iterator[Tuple[int, Text]] + LineParser = Callable[[Text], Tuple[str, Values]] + + __all__ = ['parse_requirements'] SCHEME_RE = re.compile(r'^(http|https|file):', re.I) @@ -49,19 +58,19 @@ ENV_VAR_RE = re.compile(r'(?P\$\{(?P[A-Z0-9_]+)\})') SUPPORTED_OPTIONS = [ + cmdoptions.index_url, + cmdoptions.extra_index_url, + cmdoptions.no_index, cmdoptions.constraints, - cmdoptions.editable, cmdoptions.requirements, - cmdoptions.no_index, - cmdoptions.index_url, + cmdoptions.editable, cmdoptions.find_links, - cmdoptions.extra_index_url, - cmdoptions.always_unzip, cmdoptions.no_binary, cmdoptions.only_binary, + cmdoptions.require_hashes, cmdoptions.pre, cmdoptions.trusted_host, - cmdoptions.require_hashes, + cmdoptions.always_unzip, # Deprecated ] # type: List[Callable[..., optparse.Option]] 
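# [Editor's note] A hypothetical caller sketch (not part of the patch) for
# the parse_requirements() signature change below: the PipSession becomes a
# required positional argument instead of an optional keyword that was only
# validated at runtime. Assumes a requirements.txt in the working directory:

from pipenv.patched.notpip._internal.network.session import PipSession
from pipenv.patched.notpip._internal.req.req_file import parse_requirements

session = PipSession()
for install_req in parse_requirements('requirements.txt', session):
    print(install_req.name)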
# options to be passed to requirements @@ -75,12 +84,31 @@ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +class ParsedLine(object): + def __init__( + self, + filename, # type: str + lineno, # type: int + comes_from, # type: str + args, # type: str + opts, # type: Values + constraint, # type: bool + ): + # type: (...) -> None + self.filename = filename + self.lineno = lineno + self.comes_from = comes_from + self.args = args + self.opts = opts + self.constraint = constraint + + def parse_requirements( filename, # type: str + session, # type: PipSession finder=None, # type: Optional[PackageFinder] comes_from=None, # type: Optional[str] options=None, # type: Optional[optparse.Values] - session=None, # type: Optional[PipSession] constraint=False, # type: bool wheel_cache=None, # type: Optional[WheelCache] use_pep517=None # type: Optional[bool] @@ -89,37 +117,33 @@ def parse_requirements( """Parse a requirements file and yield InstallRequirement instances. :param filename: Path or url of requirements file. + :param session: PipSession instance. :param finder: Instance of pip.index.PackageFinder. :param comes_from: Origin description of requirements. :param options: cli options. - :param session: Instance of pip.download.PipSession. :param constraint: If true, parsing a constraint file rather than requirements file. :param wheel_cache: Instance of pip.wheel.WheelCache :param use_pep517: Value of the --use-pep517 option. """ - if session is None: - raise TypeError( - "parse_requirements() missing 1 required keyword argument: " - "'session'" - ) - - _, content = get_file_content( - filename, comes_from=comes_from, session=session + skip_requirements_regex = ( + options.skip_requirements_regex if options else None + ) + line_parser = get_line_parser(finder) + parser = RequirementsFileParser( + session, line_parser, comes_from, skip_requirements_regex ) - lines_enum = preprocess(content, options) - - for line_number, line in lines_enum: - req_iter = process_line(line, filename, line_number, finder, - comes_from, options, session, wheel_cache, - use_pep517=use_pep517, constraint=constraint) - for req in req_iter: + for parsed_line in parser.parse(filename, constraint): + req = handle_line( + parsed_line, finder, options, session, wheel_cache, use_pep517 + ) + if req is not None: yield req -def preprocess(content, options): - # type: (Text, Optional[optparse.Values]) -> ReqFileLines +def preprocess(content, skip_requirements_regex): + # type: (Text, Optional[str]) -> ReqFileLines """Split, filter, and join lines, and return a line iterator :param content: the content of the requirements file @@ -128,26 +152,23 @@ def preprocess(content, options): lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines lines_enum = join_lines(lines_enum) lines_enum = ignore_comments(lines_enum) - lines_enum = skip_regex(lines_enum, options) + if skip_requirements_regex: + lines_enum = skip_regex(lines_enum, skip_requirements_regex) lines_enum = expand_env_variables(lines_enum) return lines_enum -def process_line( - line, # type: Text - filename, # type: str - line_number, # type: int +def handle_line( + line, # type: ParsedLine finder=None, # type: Optional[PackageFinder] - comes_from=None, # type: Optional[str] options=None, # type: Optional[optparse.Values] session=None, # type: Optional[PipSession] wheel_cache=None, # type: Optional[WheelCache] use_pep517=None, # type: Optional[bool] - constraint=False, # type: bool ): - # type: (...) 
-> Iterator[InstallRequirement] - """Process a single requirements line; This can result in creating/yielding - requirements, or updating the finder. + # type: (...) -> Optional[InstallRequirement] + """Handle a single parsed requirements line; This can result in + creating/yielding requirements, or updating the finder. For lines that contain requirements, the only options that have an effect are from SUPPORTED_OPTIONS_REQ, and they are scoped to the @@ -159,104 +180,65 @@ def process_line( be present, but are ignored. These lines may contain multiple options (although our docs imply only one is supported), and all our parsed and affect the finder. - - :param constraint: If True, parsing a constraints file. - :param options: OptionParser options that we may update """ - parser = build_parser(line) - defaults = parser.get_default_values() - defaults.index_url = None - if finder: - defaults.format_control = finder.format_control - args_str, options_str = break_args_options(line) - # Prior to 2.7.3, shlex cannot deal with unicode entries - if sys.version_info < (2, 7, 3): - # https://github.com/python/mypy/issues/1174 - options_str = options_str.encode('utf8') # type: ignore - # https://github.com/python/mypy/issues/1174 - opts, _ = parser.parse_args( - shlex.split(options_str), defaults) # type: ignore # preserve for the nested code path line_comes_from = '%s %s (line %s)' % ( - '-c' if constraint else '-r', filename, line_number, + '-c' if line.constraint else '-r', line.filename, line.lineno, ) - # yield a line requirement - if args_str: + # return a line requirement + if line.args: isolated = options.isolated_mode if options else False if options: - cmdoptions.check_install_build_global(options, opts) + cmdoptions.check_install_build_global(options, line.opts) # get the options that apply to requirements req_options = {} for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in opts.__dict__ and opts.__dict__[dest]: - req_options[dest] = opts.__dict__[dest] - line_source = 'line {} of {}'.format(line_number, filename) - yield install_req_from_line( - args_str, + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + line_source = 'line {} of {}'.format(line.lineno, line.filename) + return install_req_from_line( + line.args, comes_from=line_comes_from, use_pep517=use_pep517, isolated=isolated, options=req_options, wheel_cache=wheel_cache, - constraint=constraint, + constraint=line.constraint, line_source=line_source, ) - # yield an editable requirement - elif opts.editables: + # return an editable requirement + elif line.opts.editables: isolated = options.isolated_mode if options else False - yield install_req_from_editable( - opts.editables[0], comes_from=line_comes_from, + return install_req_from_editable( + line.opts.editables[0], comes_from=line_comes_from, use_pep517=use_pep517, - constraint=constraint, isolated=isolated, wheel_cache=wheel_cache + constraint=line.constraint, isolated=isolated, + wheel_cache=wheel_cache ) - # parse a nested requirements file - elif opts.requirements or opts.constraints: - if opts.requirements: - req_path = opts.requirements[0] - nested_constraint = False - else: - req_path = opts.constraints[0] - nested_constraint = True - # original file is over http - if SCHEME_RE.search(filename): - # do a url join so relative paths work - req_path = urllib_parse.urljoin(filename, req_path) - # original file and nested file are paths - elif not SCHEME_RE.search(req_path): - # do a join so relative paths work - 
req_path = os.path.join(os.path.dirname(filename), req_path) - # TODO: Why not use `comes_from='-r {} (line {})'` here as well? - parsed_reqs = parse_requirements( - req_path, finder, comes_from, options, session, - constraint=nested_constraint, wheel_cache=wheel_cache - ) - for req in parsed_reqs: - yield req - # percolate hash-checking option upward - elif opts.require_hashes: - options.require_hashes = opts.require_hashes + elif line.opts.require_hashes: + options.require_hashes = line.opts.require_hashes # set finder options elif finder: find_links = finder.find_links index_urls = finder.index_urls - if opts.index_url: - index_urls = [opts.index_url] - if opts.no_index is True: + if line.opts.index_url: + index_urls = [line.opts.index_url] + if line.opts.no_index is True: index_urls = [] - if opts.extra_index_urls: - index_urls.extend(opts.extra_index_urls) - if opts.find_links: + if line.opts.extra_index_urls: + index_urls.extend(line.opts.extra_index_urls) + if line.opts.find_links: # FIXME: it would be nice to keep track of the source # of the find_links: support a find-links local path # relative to a requirements file. - value = opts.find_links[0] - req_dir = os.path.dirname(os.path.abspath(filename)) + value = line.opts.find_links[0] + req_dir = os.path.dirname(os.path.abspath(line.filename)) relative_to_reqs_file = os.path.join(req_dir, value) if os.path.exists(relative_to_reqs_file): value = relative_to_reqs_file @@ -268,11 +250,123 @@ def process_line( ) finder.search_scope = search_scope - if opts.pre: + if line.opts.pre: finder.set_allow_all_prereleases() - for host in opts.trusted_hosts or []: - source = 'line {} of {}'.format(line_number, filename) - session.add_trusted_host(host, source=source) + + if session: + for host in line.opts.trusted_hosts or []: + source = 'line {} of {}'.format(line.lineno, line.filename) + session.add_trusted_host(host, source=source) + + return None + + +class RequirementsFileParser(object): + def __init__( + self, + session, # type: PipSession + line_parser, # type: LineParser + comes_from, # type: str + skip_requirements_regex, # type: Optional[str] + ): + # type: (...) -> None + self._session = session + self._line_parser = line_parser + self._comes_from = comes_from + self._skip_requirements_regex = skip_requirements_regex + + def parse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + """Parse a given file, yielding parsed lines. 
+ """ + for line in self._parse_and_recurse(filename, constraint): + yield line + + def _parse_and_recurse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + for line in self._parse_file(filename, constraint): + if ( + not line.args and + not line.opts.editables and + (line.opts.requirements or line.opts.constraints) + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib_parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), req_path, + ) + + for inner_line in self._parse_and_recurse( + req_path, nested_constraint, + ): + yield inner_line + else: + yield line + + def _parse_file(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + _, content = get_file_content( + filename, self._session, comes_from=self._comes_from + ) + + lines_enum = preprocess(content, self._skip_requirements_regex) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = 'Invalid requirement: %s\n%s' % (line, e.msg) + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + self._comes_from, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder): + # type: (Optional[PackageFinder]) -> LineParser + def parse_line(line): + # type: (Text) -> Tuple[str, Values] + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + # Prior to 2.7.3, shlex cannot deal with unicode entries + if sys.version_info < (2, 7, 3): + # https://github.com/python/mypy/issues/1174 + options_str = options_str.encode('utf8') # type: ignore + + # https://github.com/python/mypy/issues/1174 + opts, _ = parser.parse_args( + shlex.split(options_str), defaults) # type: ignore + + return args_str, opts + + return parse_line def break_args_options(line): @@ -293,8 +387,14 @@ def break_args_options(line): return ' '.join(args), ' '.join(options) # type: ignore -def build_parser(line): - # type: (Text) -> optparse.OptionParser +class OptionParsingError(Exception): + def __init__(self, msg): + # type: (str) -> None + self.msg = msg + + +def build_parser(): + # type: () -> optparse.OptionParser """ Return a parser for parsing requirement lines """ @@ -309,9 +409,7 @@ def build_parser(line): # that in our own exception. 
def parser_exit(self, msg): # type: (Any, str) -> NoReturn - # add offending line - msg = 'Invalid requirement: %s\n%s' % (line, msg) - raise RequirementsFileParseError(msg) + raise OptionParsingError(msg) # NOTE: mypy disallows assigning to a method # https://github.com/python/mypy/issues/2427 parser.exit = parser_exit # type: ignore @@ -361,17 +459,15 @@ def ignore_comments(lines_enum): yield line_number, line -def skip_regex(lines_enum, options): - # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines +def skip_regex(lines_enum, pattern): + # type: (ReqFileLines, str) -> ReqFileLines """ - Skip lines that match '--skip-requirements-regex' pattern + Skip lines that match the provided pattern Note: the regex pattern is only built once """ - skip_regex = options.skip_requirements_regex if options else None - if skip_regex: - pattern = re.compile(skip_regex) - lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) + matcher = re.compile(pattern) + lines_enum = filterfalse(lambda e: matcher.search(e[1]), lines_enum) return lines_enum @@ -401,3 +497,50 @@ def expand_env_variables(lines_enum): line = line.replace(env_var, value) yield line_number, line + + +def get_file_content(url, session, comes_from=None): + # type: (str, PipSession, Optional[str]) -> Tuple[str, Text] + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode. + Respects # -*- coding: declarations on the retrieved files. + + :param url: File path or url. + :param session: PipSession instance. + :param comes_from: Origin description of requirements. + """ + scheme = get_url_scheme(url) + + if scheme in ['http', 'https']: + # FIXME: catch some errors + resp = session.get(url) + resp.raise_for_status() + return resp.url, resp.text + + elif scheme == 'file': + if comes_from and comes_from.startswith('http'): + raise InstallationError( + 'Requirements file %s references URL %s, which is local' + % (comes_from, url)) + + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib_parse.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( + 'Could not open requirements file: %s' % str(exc) + ) + return url, content + + +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) diff --git a/pipenv/patched/notpip/_internal/req/req_install.py b/pipenv/patched/notpip/_internal/req/req_install.py index 2da04659cc..efc90bfe33 100644 --- a/pipenv/patched/notpip/_internal/req/req_install.py +++ b/pipenv/patched/notpip/_internal/req/req_install.py @@ -1,17 +1,13 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False from __future__ import absolute_import -import atexit import logging import os import shutil import sys -import sysconfig import zipfile -from distutils.util import change_root from pipenv.patched.notpip._vendor import pkg_resources, six from pipenv.patched.notpip._vendor.packaging.requirements import Requirement @@ -20,39 +16,40 @@ from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version from pipenv.patched.notpip._vendor.pep517.wrappers import Pep517HookCaller -from pipenv.patched.notpip._internal import pep425tags, wheel +from pipenv.patched.notpip._internal import pep425tags from pipenv.patched.notpip._internal.build_env import NoOpBuildEnvironment from pipenv.patched.notpip._internal.exceptions import InstallationError +from pipenv.patched.notpip._internal.locations import get_scheme from pipenv.patched.notpip._internal.models.link import Link -from pipenv.patched.notpip._internal.operations.generate_metadata import get_metadata_generator +from pipenv.patched.notpip._internal.operations.build.metadata import generate_metadata +from pipenv.patched.notpip._internal.operations.build.metadata_legacy import \ + generate_metadata as generate_metadata_legacy +from pipenv.patched.notpip._internal.operations.install.editable_legacy import \ + install_editable as install_editable_legacy +from pipenv.patched.notpip._internal.operations.install.legacy import install as install_legacy +from pipenv.patched.notpip._internal.operations.install.wheel import install_wheel from pipenv.patched.notpip._internal.pyproject import load_pyproject_toml, make_pyproject_path from pipenv.patched.notpip._internal.req.req_uninstall import UninstallPathSet -from pipenv.patched.notpip._internal.utils.compat import native_str +from pipenv.patched.notpip._internal.utils.deprecation import deprecated from pipenv.patched.notpip._internal.utils.hashes import Hashes from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.marker_files import ( PIP_DELETE_MARKER_FILENAME, has_delete_marker_file, + write_delete_marker_file, ) from pipenv.patched.notpip._internal.utils.misc import ( - _make_build_dir, ask_path_exists, backup_dir, display_path, dist_in_site_packages, dist_in_usersite, - ensure_dir, get_installed_version, hide_url, redact_auth_from_url, rmtree, ) from pipenv.patched.notpip._internal.utils.packaging import get_metadata -from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_shim_args -from pipenv.patched.notpip._internal.utils.subprocess import ( - call_subprocess, - runner_with_spinner_message, -) from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING from pipenv.patched.notpip._internal.utils.virtualenv import running_under_virtualenv @@ -64,7 +61,7 @@ ) from pipenv.patched.notpip._internal.build_env import BuildEnvironment from pipenv.patched.notpip._internal.cache import WheelCache - from pipenv.patched.notpip._internal.index import PackageFinder + from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._vendor.pkg_resources import Distribution from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet from pipenv.patched.notpip._vendor.packaging.markers import Marker @@ -73,6 +70,32 @@ logger = logging.getLogger(__name__) +def _get_dist(metadata_directory): + # type: (str) -> 
Distribution + """Return a pkg_resources.Distribution for the provided + metadata directory. + """ + dist_dir = metadata_directory.rstrip(os.sep) + + # Determine the correct Distribution object type. + if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + dist_name = os.path.splitext(dist_dir_name)[0] + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + return dist_cls( + base_dir, + project_name=dist_name, + metadata=metadata, + ) + + class InstallRequirement(object): """ Represents something that may be installed later on, may have information @@ -111,6 +134,10 @@ def __init__( # PEP 508 URL requirement link = Link(req.url) self.link = self.original_link = link + # Path to any downloaded or already-existing package. + self.local_file_path = None # type: Optional[str] + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path if extras: self.extras = extras @@ -126,15 +153,12 @@ def __init__( # This holds the pkg_resources.Distribution object if this requirement # is already available: - self.satisfied_by = None - # This hold the pkg_resources.Distribution object if this requirement - # conflicts with another installed distribution: - self.conflicts_with = None + self.satisfied_by = None # type: Optional[Distribution] + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. + self.should_reinstall = False # Temporary build location self._temp_build_dir = None # type: Optional[TempDirectory] - # Used to store the global directory where the _temp_build_dir should - # have been created. Cf move_to_correct_build_directory method. - self._ideal_build_dir = None # type: Optional[str] # Set to True after successful installation self.install_succeeded = None # type: Optional[bool] self.options = options if options else {} @@ -240,7 +264,7 @@ def name(self): # type: () -> Optional[str] if self.req is None: return None - return native_str(pkg_resources.safe_name(self.req.name)) + return six.ensure_str(pkg_resources.safe_name(self.req.name)) @property def specifier(self): @@ -332,15 +356,10 @@ def ensure_build_location(self, build_dir): assert self._temp_build_dir.path return self._temp_build_dir.path if self.req is None: - # for requirement via a path to a directory: the name of the - # package is not available yet so we create a temp directory - # Once run_egg_info will have run, we'll be able to fix it via - # move_to_correct_build_directory(). # Some systems have /tmp as a symlink which confuses custom # builds (such as numpy). Thus, we ensure that the real path # is returned. self._temp_build_dir = TempDirectory(kind="req-build") - self._ideal_build_dir = build_dir return self._temp_build_dir.path if self.editable: @@ -351,64 +370,47 @@ def ensure_build_location(self, build_dir): # need this) if not os.path.exists(build_dir): logger.debug('Creating directory %s', build_dir) - _make_build_dir(build_dir) + os.makedirs(build_dir) + write_delete_marker_file(build_dir) return os.path.join(build_dir, name) - def move_to_correct_build_directory(self): + def _set_requirement(self): # type: () -> None - """Move self._temp_build_dir to "self._ideal_build_dir/self.req.name" - - For some requirements (e.g. 
a path to a directory), the name of the - package is not available until we run egg_info, so the build_location - will return a temporary directory and store the _ideal_build_dir. - - This is only called to "fix" the build directory after generating - metadata. + """Set requirement after generating metadata. """ - if self.source_dir is not None: - return - assert self.req is not None - assert self._temp_build_dir - assert ( - self._ideal_build_dir is not None and - self._ideal_build_dir.path # type: ignore - ) - old_location = self._temp_build_dir - self._temp_build_dir = None # checked inside ensure_build_location - - # Figure out the correct place to put the files. - new_location = self.ensure_build_location(self._ideal_build_dir) - if os.path.exists(new_location): - raise InstallationError( - 'A package already exists in %s; please remove it to continue' - % display_path(new_location) - ) - - # Move the files to the correct location. - logger.debug( - 'Moving package %s from %s to new location %s', - self, display_path(old_location.path), display_path(new_location), - ) - shutil.move(old_location.path, new_location) + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None - # Update directory-tracking variables, to be in line with new_location - self.source_dir = os.path.normpath(os.path.abspath(new_location)) - self._temp_build_dir = TempDirectory( - path=new_location, kind="req-install", + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join([ + self.metadata["Name"], + op, + self.metadata["Version"], + ]) ) - # Correct the metadata directory, if it exists - if self.metadata_directory: - old_meta = self.metadata_directory - rel = os.path.relpath(old_meta, start=old_location.path) - new_meta = os.path.join(new_location, rel) - new_meta = os.path.normpath(os.path.abspath(new_meta)) - self.metadata_directory = new_meta + def warn_on_mismatching_name(self): + # type: () -> None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. + return - # Done with any "move built files" work, since have moved files to the - # "ideal" build location. Setting to None allows to clearly flag that - # no more moves are needed. - self._ideal_build_dir = None + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + 'Generating metadata for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.name, metadata_name, self.name + ) + self.req = Requirement(metadata_name) def remove_temporary_source(self): # type: () -> None @@ -424,36 +426,30 @@ def remove_temporary_source(self): self.build_env.cleanup() def check_if_exists(self, use_user_site): - # type: (bool) -> bool + # type: (bool) -> None """Find an installed distribution that satisfies or conflicts with this requirement, and set self.satisfied_by or - self.conflicts_with appropriately. + self.should_reinstall appropriately. """ if self.req is None: - return False + return + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. 
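# [Editor's note] Concretely, the marker-stripping step below turns e.g.
# "requests>=2.0; python_version >= '3'" into plain "requests>=2.0" so that
# pkg_resources.get_distribution() never tries to evaluate the marker.
# Hedged sketch using the vendored Requirement class directly:
#
#     demo = Requirement("requests>=2.0; python_version >= '3'")
#     demo.marker = None
#     str(demo)  # -> "requests>=2.0"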
+ no_marker = Requirement(str(self.req)) + no_marker.marker = None try: - # get_distribution() will resolve the entire list of requirements - # anyway, and we've already determined that we need the requirement - # in question, so strip the marker so that we don't try to - # evaluate it. - no_marker = Requirement(str(self.req)) - no_marker.marker = None self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) - if self.editable and self.satisfied_by: - self.conflicts_with = self.satisfied_by - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - return True except pkg_resources.DistributionNotFound: - return False + return except pkg_resources.VersionConflict: existing_dist = pkg_resources.get_distribution( self.req.name ) if use_user_site: if dist_in_usersite(existing_dist): - self.conflicts_with = existing_dist + self.should_reinstall = True elif (running_under_virtualenv() and dist_in_site_packages(existing_dist)): raise InstallationError( @@ -462,8 +458,13 @@ def check_if_exists(self, use_user_site): (existing_dist.project_name, existing_dist.location) ) else: - self.conflicts_with = existing_dist - return True + self.should_reinstall = True + else: + if self.editable and self.satisfied_by: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None # Things valid for wheels @property @@ -473,28 +474,6 @@ def is_wheel(self): return False return self.link.is_wheel - def move_wheel_files( - self, - wheeldir, # type: str - root=None, # type: Optional[str] - home=None, # type: Optional[str] - prefix=None, # type: Optional[str] - warn_script_location=True, # type: bool - use_user_site=False, # type: bool - pycompile=True # type: bool - ): - # type: (...) -> None - wheel.move_wheel_files( - self.name, self.req, wheeldir, - user=use_user_site, - home=home, - root=root, - prefix=prefix, - pycompile=pycompile, - isolated=self.isolated, - warn_script_location=warn_script_location, - ) - # Things valid for sdists @property def unpacked_source_directory(self): @@ -542,11 +521,34 @@ def load_pyproject_toml(self): return self.use_pep517 = True - requires, backend, check = pyproject_toml_data + requires, backend, check, backend_path = pyproject_toml_data self.requirements_to_check = check self.pyproject_requires = requires self.pep517_backend = Pep517HookCaller( - self.unpacked_source_directory, backend + self.unpacked_source_directory, backend, backend_path=backend_path, + ) + + def _generate_metadata(self): + # type: () -> str + """Invokes metadata generator functions, with the required arguments. 
+ """ + if not self.use_pep517: + assert self.unpacked_source_directory + + return generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + editable=self.editable, + isolated=self.isolated, + details=self.name or "from {}".format(self.link) + ) + + assert self.pep517_backend is not None + + return generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, ) def prepare_metadata(self): @@ -558,56 +560,16 @@ def prepare_metadata(self): """ assert self.source_dir - metadata_generator = get_metadata_generator(self) with indent_log(): - self.metadata_directory = metadata_generator(self) + self.metadata_directory = self._generate_metadata() - if not self.req: - if isinstance(parse_version(self.metadata["Version"]), Version): - op = "==" - else: - op = "===" - self.req = Requirement( - "".join([ - self.metadata["Name"], - op, - self.metadata["Version"], - ]) - ) - self.move_to_correct_build_directory() + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() else: - metadata_name = canonicalize_name(self.metadata["Name"]) - if canonicalize_name(self.req.name) != metadata_name: - logger.warning( - 'Generating metadata for package %s ' - 'produced metadata for project name %s. Fix your ' - '#egg=%s fragments.', - self.name, metadata_name, self.name - ) - self.req = Requirement(metadata_name) + self.warn_on_mismatching_name() - def prepare_pep517_metadata(self): - # type: () -> str - assert self.pep517_backend is not None - - # NOTE: This needs to be refactored to stop using atexit - metadata_tmpdir = TempDirectory(kind="modern-metadata") - atexit.register(metadata_tmpdir.cleanup) - - metadata_dir = metadata_tmpdir.path - - with self.build_env: - # Note that Pep517HookCaller implements a fallback for - # prepare_metadata_for_build_wheel, so we don't have to - # consider the possibility that this hook doesn't exist. - runner = runner_with_spinner_message("Preparing wheel metadata") - backend = self.pep517_backend - with backend.subprocess_runner(runner): - distinfo_dir = backend.prepare_metadata_for_build_wheel( - metadata_dir - ) - - return os.path.join(metadata_dir, distinfo_dir) + self.assert_source_matches_version() @property def metadata(self): @@ -619,26 +581,7 @@ def metadata(self): def get_dist(self): # type: () -> Distribution - """Return a pkg_resources.Distribution for this requirement""" - dist_dir = self.metadata_directory.rstrip(os.sep) - - # Determine the correct Distribution object type. - if dist_dir.endswith(".egg-info"): - dist_cls = pkg_resources.Distribution - else: - assert dist_dir.endswith(".dist-info") - dist_cls = pkg_resources.DistInfoDistribution - - # Build a PathMetadata object, from path to metadata. :wink: - base_dir, dist_dir_name = os.path.split(dist_dir) - dist_name = os.path.splitext(dist_dir_name)[0] - metadata = pkg_resources.PathMetadata(base_dir, dist_dir) - - return dist_cls( - base_dir, - project_name=dist_name, - metadata=metadata, - ) + return _get_dist(self.metadata_directory) def assert_source_matches_version(self): # type: () -> None @@ -674,34 +617,6 @@ def ensure_has_source_dir(self, parent_dir): self.source_dir = self.ensure_build_location(parent_dir) # For editable installations - def install_editable( - self, - install_options, # type: List[str] - global_options=(), # type: Sequence[str] - prefix=None # type: Optional[str] - ): - # type: (...) 
-> None - logger.info('Running setup.py develop for %s', self.name) - - if prefix: - prefix_param = ['--prefix={}'.format(prefix)] - install_options = list(install_options) + prefix_param - base_cmd = make_setuptools_shim_args( - self.setup_py_path, - global_options=global_options, - no_user_config=self.isolated - ) - with indent_log(): - with self.build_env: - call_subprocess( - base_cmd + - ['develop', '--no-deps'] + - list(install_options), - cwd=self.unpacked_source_directory, - ) - - self.install_succeeded = True - def update_editable(self, obtain=True): # type: (bool) -> None if not self.link: @@ -720,6 +635,20 @@ def update_editable(self, obtain=True): vc_type, url = self.link.url.split('+', 1) vcs_backend = vcs.get_backend(vc_type) if vcs_backend: + if not self.link.is_vcs: + reason = ( + "This form of VCS requirement is being deprecated: {}." + ).format( + self.link.url + ) + replacement = None + if self.link.url.startswith("git+git@"): + replacement = ( + "git+https://git@example.com/..., " + "git+ssh://git@example.com/..., " + "or the insecure git+git://git@example.com/..." + ) + deprecated(reason, replacement, gone_in="21.0", issue=7554) hidden_url = hide_url(self.link.url) if obtain: vcs_backend.obtain(self.source_dir, url=hidden_url) @@ -731,9 +660,8 @@ def update_editable(self, obtain=True): % (self.link, vc_type)) # Top-level Actions - def uninstall(self, auto_confirm=False, verbose=False, - use_user_site=False): - # type: (bool, bool, bool) -> Optional[UninstallPathSet] + def uninstall(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> Optional[UninstallPathSet] """ Uninstall the distribution currently satisfying this requirement. @@ -746,28 +674,33 @@ def uninstall(self, auto_confirm=False, verbose=False, linked to global site-packages. """ - if not self.check_if_exists(use_user_site): + assert self.req + try: + dist = pkg_resources.get_distribution(self.req.name) + except pkg_resources.DistributionNotFound: logger.warning("Skipping %s as it is not installed.", self.name) return None - dist = self.satisfied_by or self.conflicts_with + else: + logger.info('Found existing installation: %s', dist) uninstalled_pathset = UninstallPathSet.from_dist(dist) uninstalled_pathset.remove(auto_confirm, verbose) return uninstalled_pathset - def _clean_zip_name(self, name, prefix): # only used by archive. - # type: (str, str) -> str - assert name.startswith(prefix + os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix) - ) - name = name[len(prefix) + 1:] - name = name.replace(os.path.sep, '/') - return name - def _get_archive_name(self, path, parentdir, rootdir): # type: (str, str, str) -> str + + def _clean_zip_name(name, prefix): + # type: (str, str) -> str + assert name.startswith(prefix + os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + path = os.path.join(parentdir, path) - name = self._clean_zip_name(path, rootdir) + name = _clean_zip_name(path, rootdir) return self.name + '/' + name def archive(self, build_dir): @@ -845,122 +778,53 @@ def install( pycompile=True # type: bool ): # type: (...) 
-> None + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + global_options = global_options if global_options is not None else [] if self.editable: - self.install_editable( - install_options, global_options, prefix=prefix, + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, ) + self.install_succeeded = True return - if self.is_wheel: - version = wheel.wheel_version(self.source_dir) - wheel.check_compatibility(version, self.name) - self.move_wheel_files( - self.source_dir, root=root, prefix=prefix, home=home, + if self.is_wheel: + assert self.local_file_path + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, warn_script_location=warn_script_location, - use_user_site=use_user_site, pycompile=pycompile, ) self.install_succeeded = True return - # Extend the list of global and install options passed on to - # the setup.py call with the ones from the requirements file. - # Options specified in requirements file override those - # specified on the command line, since the last option given - # to setup.py is the one that is used. - global_options = list(global_options) + \ - self.options.get('global_options', []) - install_options = list(install_options) + \ - self.options.get('install_options', []) - - with TempDirectory(kind="record") as temp_dir: - record_filename = os.path.join(temp_dir.path, 'install-record.txt') - install_args = self.get_install_args( - global_options, record_filename, root, prefix, pycompile, - ) - - runner = runner_with_spinner_message( - "Running setup.py install for {}".format(self.name) - ) - with indent_log(), self.build_env: - runner( - cmd=install_args + install_options, - cwd=self.unpacked_source_directory, - ) - - if not os.path.exists(record_filename): - logger.debug('Record file %s not found', record_filename) - return - self.install_succeeded = True - - def prepend_root(path): - # type: (str) -> str - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - with open(record_filename) as f: - for line in f: - directory = os.path.dirname(line) - if directory.endswith('.egg-info'): - egg_info_dir = prepend_root(directory) - break - else: - logger.warning( - 'Could not find .egg-info directory in install record' - ' for %s', - self, - ) - # FIXME: put the record somewhere - return - new_lines = [] - with open(record_filename) as f: - for line in f: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append( - os.path.relpath(prepend_root(filename), egg_info_dir) - ) - new_lines.sort() - ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') - with open(inst_files_path, 'w') as f: - f.write('\n'.join(new_lines) + '\n') - - def get_install_args( - self, - global_options, # type: Sequence[str] - record_filename, # type: str - root, # type: Optional[str] - prefix, # type: Optional[str] - pycompile # type: bool - ): - # type: (...) 
-> List[str] - install_args = make_setuptools_shim_args( - self.setup_py_path, + install_legacy( + self, + install_options=install_options, global_options=global_options, - no_user_config=self.isolated, - unbuffered_output=True + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, ) - install_args += ['install', '--record', record_filename] - install_args += ['--single-version-externally-managed'] - - if root is not None: - install_args += ['--root', root] - if prefix is not None: - install_args += ['--prefix', prefix] - - if pycompile: - install_args += ["--compile"] - else: - install_args += ["--no-compile"] - - if running_under_virtualenv(): - py_ver_str = 'python' + sysconfig.get_python_version() - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - py_ver_str, self.name)] - - return install_args diff --git a/pipenv/patched/notpip/_internal/req/req_set.py b/pipenv/patched/notpip/_internal/req/req_set.py index d99dc43497..919bc8f74e 100644 --- a/pipenv/patched/notpip/_internal/req/req_set.py +++ b/pipenv/patched/notpip/_internal/req/req_set.py @@ -10,9 +10,9 @@ from pipenv.patched.notpip._internal import pep425tags from pipenv.patched.notpip._internal.exceptions import InstallationError +from pipenv.patched.notpip._internal.models.wheel import Wheel from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.wheel import Wheel if MYPY_CHECK_RUNNING: from typing import Dict, Iterable, List, Optional, Tuple @@ -24,13 +24,12 @@ class RequirementSet(object): - def __init__(self, require_hashes=False, check_supported_wheels=True, ignore_compatibility=True): - # type: (bool) -> None + def __init__(self, check_supported_wheels=True, ignore_compatibility=True): + # type: (bool, bool) -> None """Create a RequirementSet. """ self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501 - self.require_hashes = require_hashes self.check_supported_wheels = check_supported_wheels self.unnamed_requirements = [] # type: List[InstallRequirement] diff --git a/pipenv/patched/notpip/_internal/req/req_tracker.py b/pipenv/patched/notpip/_internal/req/req_tracker.py index 1fa4fe7e10..b4d0c98430 100644 --- a/pipenv/patched/notpip/_internal/req/req_tracker.py +++ b/pipenv/patched/notpip/_internal/req/req_tracker.py @@ -9,34 +9,74 @@ import logging import os +from pipenv.patched.notpip._vendor import contextlib2 + from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from types import TracebackType - from typing import Iterator, Optional, Set, Type + from typing import Dict, Iterator, Optional, Set, Type, Union from pipenv.patched.notpip._internal.req.req_install import InstallRequirement from pipenv.patched.notpip._internal.models.link import Link logger = logging.getLogger(__name__) +@contextlib.contextmanager +def update_env_context_manager(**changes): + # type: (str) -> Iterator[None] + target = os.environ + + # Save values from the target and change them. 
+ non_existent_marker = object() + saved_values = {} # type: Dict[str, Union[object, str]] + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. + for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker(): + # type: () -> Iterator[RequirementTracker] + root = os.environ.get('PIP_REQ_TRACKER') + with contextlib2.ExitStack() as ctx: + if root is None: + root = ctx.enter_context( + TempDirectory(kind='req-tracker') + ).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + class RequirementTracker(object): - def __init__(self): - # type: () -> None - self._root = os.environ.get('PIP_REQ_TRACKER') - if self._root is None: - self._temp_dir = TempDirectory(delete=False, kind='req-tracker') - self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path - logger.debug('Created requirements tracker %r', self._root) - else: - self._temp_dir = None - logger.debug('Re-using requirements tracker %r', self._root) + def __init__(self, root): + # type: (str) -> None + self._root = root self._entries = set() # type: Set[InstallRequirement] + logger.debug("Created build tracker: %s", self._root) def __enter__(self): # type: () -> RequirementTracker + logger.debug("Entered build tracker: %s", self._root) return self def __exit__( @@ -55,40 +95,52 @@ def _entry_path(self, link): def add(self, req): # type: (InstallRequirement) -> None - link = req.link - info = str(req) - entry_path = self._entry_path(link) + """Add an InstallRequirement to build tracking. + """ + + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. try: with open(entry_path) as fp: - # Error, these's already a build in progress. - raise LookupError('%s is already being built: %s' - % (link, fp.read())) + contents = fp.read() except IOError as e: + # if the error is anything other than "file does not exist", raise. if e.errno != errno.ENOENT: raise - assert req not in self._entries - with open(entry_path, 'w') as fp: - fp.write(info) - self._entries.add(req) - logger.debug('Added %s to build tracker %r', req, self._root) + else: + message = '%s is already being built: %s' % (req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. + with open(entry_path, 'w') as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug('Added %s to build tracker %r', req, self._root) def remove(self, req): # type: (InstallRequirement) -> None - link = req.link + """Remove an InstallRequirement from build tracking. + """ + + # Delete the created file and the corresponding entries. 
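# [Editor's note] Usage sketch (not part of the patch) for the new tracker
# plumbing above: get_requirement_tracker() re-uses the build tracker named
# by PIP_REQ_TRACKER, or creates a temporary one and exports that variable
# to child pip processes via update_env_context_manager():
#
#     with get_requirement_tracker() as tracker:
#         with tracker.track(install_req):
#             ...  # build work; tracking the same link twice raises LookupError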
+ os.unlink(self._entry_path(req.link)) self._entries.remove(req) - os.unlink(self._entry_path(link)) + logger.debug('Removed %s from build tracker %r', req, self._root) def cleanup(self): # type: () -> None for req in set(self._entries): self.remove(req) - remove = self._temp_dir is not None - if remove: - self._temp_dir.cleanup() - logger.debug('%s build tracker %r', - 'Removed' if remove else 'Cleaned', - self._root) + + logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager def track(self, req): diff --git a/pipenv/patched/notpip/_internal/req/req_uninstall.py b/pipenv/patched/notpip/_internal/req/req_uninstall.py index add3418c0a..d11a222974 100644 --- a/pipenv/patched/notpip/_internal/req/req_uninstall.py +++ b/pipenv/patched/notpip/_internal/req/req_uninstall.py @@ -59,7 +59,7 @@ def _script_names(dist, script_name, is_gui): def _unique(fn): - # type: (Callable) -> Callable[..., Iterator[Any]] + # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]] @functools.wraps(fn) def unique(*args, **kw): # type: (Any, Any) -> Iterator[Any] @@ -295,7 +295,7 @@ def rollback(self): # type: () -> None """Undoes the uninstall by moving stashed files back.""" for p in self._moves: - logging.info("Moving to %s\n from %s", *p) + logger.info("Moving to %s\n from %s", *p) for new_path, path in self._moves: try: diff --git a/pipenv/patched/notpip/_internal/self_outdated_check.py b/pipenv/patched/notpip/_internal/self_outdated_check.py index 8bf5e9f719..01b9bdec12 100644 --- a/pipenv/patched/notpip/_internal/self_outdated_check.py +++ b/pipenv/patched/notpip/_internal/self_outdated_check.py @@ -14,11 +14,10 @@ from pipenv.patched.notpip._vendor.packaging import version as packaging_version from pipenv.patched.notpip._vendor.six import ensure_binary -from pipenv.patched.notpip._internal.collector import LinkCollector -from pipenv.patched.notpip._internal.index import PackageFinder +from pipenv.patched.notpip._internal.index.collector import LinkCollector +from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._internal.models.search_scope import SearchScope from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences -from pipenv.patched.notpip._internal.utils.compat import WINDOWS from pipenv.patched.notpip._internal.utils.filesystem import ( adjacent_tmp_file, check_path_owner, @@ -225,12 +224,11 @@ def pip_self_version_check(session, options): if not local_version_is_older: return - # Advise "python -m pip" on Windows to avoid issues - # with overwriting pip.exe. - if WINDOWS: - pip_cmd = "python -m pip" - else: - pip_cmd = "pip" + # We cannot tell how the current pip is available in the current + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable`. + pip_cmd = "{} -m pip".format(sys.executable) logger.warning( "You are using pip version %s; however, version %s is " "available.\nYou should consider upgrading via the " diff --git a/pipenv/patched/notpip/_internal/utils/appdirs.py b/pipenv/patched/notpip/_internal/utils/appdirs.py index c1ba02d214..ab99ec2383 100644 --- a/pipenv/patched/notpip/_internal/utils/appdirs.py +++ b/pipenv/patched/notpip/_internal/utils/appdirs.py @@ -1,19 +1,17 @@ """ -This code was taken from https://github.com/ActiveState/appdirs and modified -to suit our purposes. 
-""" +This code wraps the vendored appdirs module to so the return values are +compatible for the current pip code base. -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False +The intention is to rewrite current usages gradually, keeping the tests pass, +and eventually drop this after all usages are changed. +""" from __future__ import absolute_import import os -import sys -from pipenv.patched.notpip._vendor.six import PY2, text_type +from pipenv.patched.notpip._vendor import appdirs as _appdirs -from pipenv.patched.notpip._internal.utils.compat import WINDOWS, expanduser from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: @@ -22,255 +20,22 @@ def user_cache_dir(appname): # type: (str) -> str - r""" - Return full path to the user-specific cache dir for this application. - - "appname" is the name of application. - - Typical user cache directories are: - macOS: ~/Library/Caches/ - Unix: ~/.cache/ (XDG default) - Windows: C:\Users\\AppData\Local\\Cache - - On Windows the only suggestion in the MSDN docs is that local settings go - in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the - non-roaming app data dir (the default returned by `user_data_dir`). Apps - typically put cache data somewhere *under* the given dir here. Some - examples: - ...\Mozilla\Firefox\Profiles\\Cache - ...\Acme\SuperApp\Cache\1.0 - - OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. - """ - if WINDOWS: - # Get the base path - path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) - - # When using Python 2, return paths as bytes on Windows like we do on - # other operating systems. See helper function docs for more details. - if PY2 and isinstance(path, text_type): - path = _win_path_to_bytes(path) - - # Add our app name and Cache directory to it - path = os.path.join(path, appname, "Cache") - elif sys.platform == "darwin": - # Get the base path - path = expanduser("~/Library/Caches") - - # Add our app name to it - path = os.path.join(path, appname) - else: - # Get the base path - path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) - - # Add our app name to it - path = os.path.join(path, appname) - - return path - - -def user_data_dir(appname, roaming=False): - # type: (str, bool) -> str - r""" - Return full path to the user-specific data dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default False) can be set True to use the Windows - roaming appdata directory. That means that for users on a Windows - network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - macOS: ~/Library/Application Support/ - if it exists, else ~/.config/ - Unix: ~/.local/share/ # or in - $XDG_DATA_HOME, if defined - Win XP (not roaming): C:\Documents and Settings\\ ... - ...Application Data\ - Win XP (roaming): C:\Documents and Settings\\Local ... - ...Settings\Application Data\ - Win 7 (not roaming): C:\\Users\\AppData\Local\ - Win 7 (roaming): C:\\Users\\AppData\Roaming\ - - For Unix, we follow the XDG spec and support $XDG_DATA_HOME. - That means, by default "~/.local/share/". 
- """ - if WINDOWS: - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" - path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) - elif sys.platform == "darwin": - path = os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) if os.path.isdir(os.path.join( - expanduser('~/Library/Application Support/'), - appname, - ) - ) else os.path.join( - expanduser('~/.config/'), - appname, - ) - else: - path = os.path.join( - os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), - appname, - ) - - return path + return _appdirs.user_cache_dir(appname, appauthor=False) def user_config_dir(appname, roaming=True): # type: (str, bool) -> str - """Return full path to the user-specific config dir for this application. - - "appname" is the name of application. - If None, just the system directory is returned. - "roaming" (boolean, default True) can be set False to not use the - Windows roaming appdata directory. That means that for users on a - Windows network setup for roaming profiles, this user data will be - sync'd on login. See - - for a discussion of issues. - - Typical user data directories are: - macOS: same as user_data_dir - Unix: ~/.config/ - Win *: same as user_data_dir + return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) - For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. - That means, by default "~/.config/". - """ - if WINDOWS: - path = user_data_dir(appname, roaming=roaming) - elif sys.platform == "darwin": - path = user_data_dir(appname) - else: - path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) - path = os.path.join(path, appname) - return path +def user_data_dir(appname, roaming=False): + # type: (str, bool) -> str + return _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming) -# for the discussion regarding site_config_dirs locations -# see def site_config_dirs(appname): # type: (str) -> List[str] - r"""Return a list of potential user-shared config dirs for this application. - - "appname" is the name of application. - - Typical user config directories are: - macOS: /Library/Application Support// - Unix: /etc or $XDG_CONFIG_DIRS[i]// for each value in - $XDG_CONFIG_DIRS - Win XP: C:\Documents and Settings\All Users\Application ... - ...Data\\ - Vista: (Fail! "C:\ProgramData" is a hidden *system* directory - on Vista.) - Win 7: Hidden, but writeable on Win 7: - C:\ProgramData\\ - """ - if WINDOWS: - path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) - pathlist = [os.path.join(path, appname)] - elif sys.platform == 'darwin': - pathlist = [os.path.join('/Library/Application Support', appname)] - else: - # try looking in $XDG_CONFIG_DIRS - xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') - if xdg_config_dirs: - pathlist = [ - os.path.join(expanduser(x), appname) - for x in xdg_config_dirs.split(os.pathsep) - ] - else: - pathlist = [] - - # always look in /etc directly as well - pathlist.append('/etc') - - return pathlist - - -# -- Windows support functions -- - -def _get_win_folder_from_registry(csidl_name): - # type: (str) -> str - """ - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. 
- """ - import _winreg - - shell_folder_name = { - "CSIDL_APPDATA": "AppData", - "CSIDL_COMMON_APPDATA": "Common AppData", - "CSIDL_LOCAL_APPDATA": "Local AppData", - }[csidl_name] - - key = _winreg.OpenKey( - _winreg.HKEY_CURRENT_USER, - r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" - ) - directory, _type = _winreg.QueryValueEx(key, shell_folder_name) - return directory - - -def _get_win_folder_with_ctypes(csidl_name): - # type: (str) -> str - # On Python 2, ctypes.create_unicode_buffer().value returns "unicode", - # which isn't the same as str in the annotation above. - csidl_const = { - "CSIDL_APPDATA": 26, - "CSIDL_COMMON_APPDATA": 35, - "CSIDL_LOCAL_APPDATA": 28, - }[csidl_name] - - buf = ctypes.create_unicode_buffer(1024) - windll = ctypes.windll # type: ignore - windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break - if has_high_char: - buf2 = ctypes.create_unicode_buffer(1024) - if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): - buf = buf2 - - # The type: ignore is explained under the type annotation for this function - return buf.value # type: ignore - - -if WINDOWS: - try: - import ctypes - _get_win_folder = _get_win_folder_with_ctypes - except ImportError: - _get_win_folder = _get_win_folder_from_registry - - -def _win_path_to_bytes(path): - """Encode Windows paths to bytes. Only used on Python 2. - - Motivation is to be consistent with other operating systems where paths - are also returned as bytes. This avoids problems mixing bytes and Unicode - elsewhere in the codebase. For more details and discussion see - . - - If encoding using ASCII and MBCS fails, return the original Unicode path. - """ - for encoding in ('ASCII', 'MBCS'): - try: - return path.encode(encoding) - except (UnicodeEncodeError, LookupError): - pass - return path + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if _appdirs.system not in ["win32", "darwin"]: + return dirval.split(os.pathsep) + return [dirval] diff --git a/pipenv/patched/notpip/_internal/utils/compat.py b/pipenv/patched/notpip/_internal/utils/compat.py index 758aa0d3bc..7f9886a540 100644 --- a/pipenv/patched/notpip/_internal/utils/compat.py +++ b/pipenv/patched/notpip/_internal/utils/compat.py @@ -14,21 +14,12 @@ import sys from pipenv.patched.notpip._vendor.six import PY2, text_type -from pipenv.patched.notpip._vendor.urllib3.util import IS_PYOPENSSL from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import Optional, Text, Tuple, Union -try: - import _ssl # noqa -except ImportError: - ssl = None -else: - # This additional assignment was needed to prevent a mypy error. 
- ssl = _ssl - try: import ipaddress except ImportError: @@ -41,16 +32,13 @@ __all__ = [ - "ipaddress", "uses_pycache", "console_to_str", "native_str", + "ipaddress", "uses_pycache", "console_to_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", - "get_extension_suffixes", ] logger = logging.getLogger(__name__) -HAS_TLS = (ssl is not None) or IS_PYOPENSSL - if PY2: import imp @@ -86,6 +74,18 @@ def backslashreplace_decode_fn(err): backslashreplace_decode = "backslashreplace" +def has_tls(): + # type: () -> bool + try: + import _ssl # noqa: F401 # ignore unused + return True + except ImportError: + pass + + from pipenv.patched.notpip._vendor.urllib3.util import IS_PYOPENSSL + return IS_PYOPENSSL + + def str_to_display(data, desc=None): # type: (Union[bytes, Text], Optional[str]) -> Text """ @@ -159,22 +159,6 @@ def console_to_str(data): return str_to_display(data, desc='Subprocess output') -if PY2: - def native_str(s, replace=False): - # type: (str, bool) -> str - # Replace is ignored -- unicode to UTF-8 can't fail - if isinstance(s, text_type): - return s.encode('utf-8') - return s - -else: - def native_str(s, replace=False): - # type: (str, bool) -> str - if isinstance(s, bytes): - return s.decode('utf-8', 'replace' if replace else 'strict') - return s - - def get_path_uid(path): # type: (str) -> int """ @@ -205,19 +189,6 @@ def get_path_uid(path): return file_uid -if PY2: - from imp import get_suffixes - - def get_extension_suffixes(): - return [suffix[0] for suffix in get_suffixes()] - -else: - from importlib.machinery import EXTENSION_SUFFIXES - - def get_extension_suffixes(): - return EXTENSION_SUFFIXES - - def expanduser(path): # type: (str) -> str """ @@ -286,12 +257,13 @@ def ioctl_GWINSZ(fd): return cr cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) if not cr: - try: - fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) - os.close(fd) - except Exception: - pass + if sys.platform != "win32": + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except Exception: + pass if not cr: cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) return int(cr[1]), int(cr[0]) diff --git a/pipenv/patched/notpip/_internal/utils/distutils_args.py b/pipenv/patched/notpip/_internal/utils/distutils_args.py new file mode 100644 index 0000000000..2305d96eb0 --- /dev/null +++ b/pipenv/patched/notpip/_internal/utils/distutils_args.py @@ -0,0 +1,48 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt + +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, List + + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, ""), + ("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args): + # type: (List[str]) -> Dict[str, str] + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. 
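+
+    For example, parse_distutils_args(["--prefix=/opt/x", "--unknown"])
+    returns {"prefix": "/opt/x"}: the unrecognized flag is dropped.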
+ """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/pipenv/patched/notpip/_internal/utils/entrypoints.py b/pipenv/patched/notpip/_internal/utils/entrypoints.py new file mode 100644 index 0000000000..030d934f98 --- /dev/null +++ b/pipenv/patched/notpip/_internal/utils/entrypoints.py @@ -0,0 +1,31 @@ +import sys + +from pipenv.patched.notpip._internal.cli.main import main +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def _wrapper(args=None): + # type: (Optional[List[str]]) -> int + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/pipenv/patched/notpip/_internal/utils/filesystem.py b/pipenv/patched/notpip/_internal/utils/filesystem.py index c1e4507db6..c101c7b6f2 100644 --- a/pipenv/patched/notpip/_internal/utils/filesystem.py +++ b/pipenv/patched/notpip/_internal/utils/filesystem.py @@ -1,7 +1,10 @@ +import errno import os import os.path +import random import shutil import stat +import sys from contextlib import contextmanager from tempfile import NamedTemporaryFile @@ -11,8 +14,7 @@ from pipenv.patched.notpip._vendor.six import PY2 from pipenv.patched.notpip._internal.utils.compat import get_path_uid -from pipenv.patched.notpip._internal.utils.misc import cast -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING, cast if MYPY_CHECK_RUNNING: from typing import BinaryIO, Iterator @@ -28,9 +30,11 @@ def check_path_owner(path): # type: (str) -> bool # If we don't have a way to check the effective uid of this process, then # we'll just assume that we own the directory. - if not hasattr(os, "geteuid"): + if sys.platform == "win32" or not hasattr(os, "geteuid"): return True + assert os.path.isabs(path) + previous = None while path != previous: if os.path.lexists(path): @@ -113,3 +117,55 @@ def replace(src, dest): else: replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path): + # type: (str) -> bool + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. 
+ while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == 'posix': + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path): + # type: (str) -> bool + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = 'accesstest_deleteme_fishfingers_custard_' + alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789' + for i in range(10): + name = basename + ''.join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except OSError as e: + if e.errno == errno.EEXIST: + continue + if e.errno == errno.EPERM: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + return False + raise + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise EnvironmentError( + 'Unexpected condition testing for writable directory' + ) diff --git a/pipenv/patched/notpip/_internal/utils/glibc.py b/pipenv/patched/notpip/_internal/utils/glibc.py index 5e3c6b1af2..e1c250411f 100644 --- a/pipenv/patched/notpip/_internal/utils/glibc.py +++ b/pipenv/patched/notpip/_internal/utils/glibc.py @@ -4,8 +4,7 @@ from __future__ import absolute_import import os -import re -import warnings +import sys from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -26,6 +25,8 @@ def glibc_version_string_confstr(): # to be broken or missing. This strategy is used in the standard library # platform module: # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None try: # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": _, version = os.confstr("CS_GNU_LIBC_VERSION").split() @@ -66,32 +67,6 @@ def glibc_version_string_ctypes(): return version_str -# Separated out from have_compatible_glibc for easier unit testing -def check_glibc_version(version_str, required_major, minimum_minor): - # type: (str, int, int) -> bool - # Parse string and check against requested version. - # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn("Expected glibc version with 2 components major.minor," - " got: %s" % version_str, RuntimeWarning) - return False - return (int(m.group("major")) == required_major and - int(m.group("minor")) >= minimum_minor) - - -def have_compatible_glibc(required_major, minimum_minor): - # type: (int, int) -> bool - version_str = glibc_version_string() - if version_str is None: - return False - return check_glibc_version(version_str, required_major, minimum_minor) - - # platform.libc_ver regularly returns completely nonsensical glibc # versions. E.g. 
on my computer, platform says: # diff --git a/pipenv/patched/notpip/_internal/utils/hashes.py b/pipenv/patched/notpip/_internal/utils/hashes.py index ef81612bb7..dc15b54abd 100644 --- a/pipenv/patched/notpip/_internal/utils/hashes.py +++ b/pipenv/patched/notpip/_internal/utils/hashes.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - from __future__ import absolute_import import hashlib @@ -59,6 +56,7 @@ def is_hash_allowed( hash_name, # type: str hex_digest, # type: str ): + # type: (...) -> bool """Return whether the given hex digest is allowed.""" return hex_digest in self._allowed.get(hash_name, []) diff --git a/pipenv/patched/notpip/_internal/utils/marker_files.py b/pipenv/patched/notpip/_internal/utils/marker_files.py index 734cba4c1d..42ea814050 100644 --- a/pipenv/patched/notpip/_internal/utils/marker_files.py +++ b/pipenv/patched/notpip/_internal/utils/marker_files.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - import os.path DELETE_MARKER_MESSAGE = '''\ @@ -14,6 +11,7 @@ def has_delete_marker_file(directory): + # type: (str) -> bool return os.path.exists(os.path.join(directory, PIP_DELETE_MARKER_FILENAME)) diff --git a/pipenv/patched/notpip/_internal/utils/misc.py b/pipenv/patched/notpip/_internal/utils/misc.py index 87af02a4e7..9a6fe4d583 100644 --- a/pipenv/patched/notpip/_internal/utils/misc.py +++ b/pipenv/patched/notpip/_internal/utils/misc.py @@ -7,6 +7,7 @@ import contextlib import errno import getpass +import hashlib import io import logging import os @@ -38,8 +39,7 @@ stdlib_pkgs, str_to_display, ) -from pipenv.patched.notpip._internal.utils.marker_files import write_delete_marker_file -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING, cast from pipenv.patched.notpip._internal.utils.virtualenv import ( running_under_virtualenv, virtualenv_no_global, @@ -53,16 +53,11 @@ if MYPY_CHECK_RUNNING: from typing import ( Any, AnyStr, Container, Iterable, List, Optional, Text, - Tuple, Union, cast, + Tuple, Union, ) from pipenv.patched.notpip._vendor.pkg_resources import Distribution VersionInfo = Tuple[int, int, int] -else: - # typing's cast() is needed at runtime, but we don't want to import typing. - # Thus, we use a dummy no-op version, which we tell mypy to ignore. - def cast(type_, value): # type: ignore - return value __all__ = ['rmtree', 'display_path', 'backup_dir', @@ -115,7 +110,8 @@ def ensure_dir(path): try: os.makedirs(path) except OSError as e: - if e.errno != errno.EEXIST: + # Windows can raise spurious ENOTEMPTY errors. See #6426. 
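+        # Either errno means the directory already exists, which is the
+        # state ensure_dir() is after, so swallow the error.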
+ if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: raise @@ -270,13 +266,13 @@ def ask_password(message): def format_size(bytes): # type: (float) -> str if bytes > 1000 * 1000: - return '%.1fMB' % (bytes / 1000.0 / 1000) + return '%.1f MB' % (bytes / 1000.0 / 1000) elif bytes > 10 * 1000: - return '%ikB' % (bytes / 1000) + return '%i kB' % (bytes / 1000) elif bytes > 1000: - return '%.1fkB' % (bytes / 1000.0) + return '%.1f kB' % (bytes / 1000.0) else: - return '%ibytes' % bytes + return '%i bytes' % bytes def is_installable_dir(path): @@ -460,8 +456,7 @@ def editables_only_test(d): def user_test(d): return True - # because of pkg_resources vendoring, mypy cannot find stub in typeshed - return [d for d in working_set # type: ignore + return [d for d in working_set if local_test(d) and d.key not in skip and editable_test(d) and @@ -527,11 +522,6 @@ def write_output(msg, *args): logger.info(msg, *args) -def _make_build_dir(build_dir): - os.makedirs(build_dir) - write_delete_marker_file(build_dir) - - class FakeFile(object): """Wrap a list of lines in an object with readline() to make ConfigParser happy.""" @@ -840,11 +830,11 @@ def protect_pip_from_modification_on_windows(modifying_pip): On Windows, any operation modifying pip should be run as: python -m pip ... """ - pip_names = set() - for ext in ('', '.exe'): - pip_names.add('pip{ext}'.format(ext=ext)) - pip_names.add('pip{}{ext}'.format(sys.version_info[0], ext=ext)) - pip_names.add('pip{}.{}{ext}'.format(*sys.version_info[:2], ext=ext)) + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]) + ] # See https://github.com/pypa/pip/issues/1299 for more discussion should_show_use_python_msg = ( @@ -868,3 +858,29 @@ def is_console_interactive(): """Is this console interactive? """ return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path, blocksize=1 << 20): + # type: (str, int) -> Tuple[Any, int] + """Return (hash, length) for path using hashlib.sha256() + """ + + h = hashlib.sha256() + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed(): + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True diff --git a/pipenv/patched/notpip/_internal/utils/pkg_resources.py b/pipenv/patched/notpip/_internal/utils/pkg_resources.py new file mode 100644 index 0000000000..b54553a7c1 --- /dev/null +++ b/pipenv/patched/notpip/_internal/utils/pkg_resources.py @@ -0,0 +1,44 @@ +from pipenv.patched.notpip._vendor.pkg_resources import yield_lines +from pipenv.patched.notpip._vendor.six import ensure_str + +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterable, List + + +class DictMetadata(object): + """IMetadataProvider that reads metadata files from a dictionary. + """ + def __init__(self, metadata): + # type: (Dict[str, bytes]) -> None + self._metadata = metadata + + def has_metadata(self, name): + # type: (str) -> bool + return name in self._metadata + + def get_metadata(self, name): + # type: (str) -> str + try: + return ensure_str(self._metadata[name]) + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. 
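+            # Appending to e.reason (rather than raising a new exception)
+            # preserves the original traceback while naming the bad file.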
+ e.reason += " in {} file".format(name) + raise + + def get_metadata_lines(self, name): + # type: (str) -> Iterable[str] + return yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name): + # type: (str) -> bool + return False + + def metadata_listdir(self, name): + # type: (str) -> List[str] + return [] + + def run_script(self, script_name, namespace): + # type: (str, str) -> None + pass diff --git a/pipenv/patched/notpip/_internal/utils/setuptools_build.py b/pipenv/patched/notpip/_internal/utils/setuptools_build.py index 0151ee2f85..83ec57bc87 100644 --- a/pipenv/patched/notpip/_internal/utils/setuptools_build.py +++ b/pipenv/patched/notpip/_internal/utils/setuptools_build.py @@ -4,7 +4,7 @@ from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import List, Sequence + from typing import List, Optional, Sequence # Shim to wrap setup.py invocation with setuptools # @@ -22,10 +22,10 @@ def make_setuptools_shim_args( - setup_py_path, # type: str - global_options=None, # type: Sequence[str] - no_user_config=False, # type: bool - unbuffered_output=False # type: bool + setup_py_path, # type: str + global_options=None, # type: Sequence[str] + no_user_config=False, # type: bool + unbuffered_output=False # type: bool ): # type: (...) -> List[str] """ @@ -40,10 +40,144 @@ def make_setuptools_shim_args( sys_executable = os.environ.get('PIP_PYTHON_PATH', sys.executable) args = [sys_executable] if unbuffered_output: - args.append('-u') - args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)]) + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] if global_options: - args.extend(global_options) + args += global_options if no_user_config: - args.append('--no-user-cfg') + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + build_options, # type: Sequence[str] + destination_dir, # type: str +): + # type: (...) -> List[str] + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + no_user_config, # type: bool + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool +): + # type: (...) 
-> List[str] + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path, # type: str + egg_info_dir, # type: Optional[str] + no_user_config, # type: bool +): + # type: (...) -> List[str] + args = make_setuptools_shim_args(setup_py_path) + if no_user_config: + args += ["--no-user-cfg"] + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + record_filename, # type: str + root, # type: Optional[str] + prefix, # type: Optional[str] + header_dir, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + no_user_config, # type: bool + pycompile # type: bool +): + # type: (...) -> List[str] + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + return args diff --git a/pipenv/patched/notpip/_internal/utils/subprocess.py b/pipenv/patched/notpip/_internal/utils/subprocess.py index 1cf2faa9e4..d316076599 100644 --- a/pipenv/patched/notpip/_internal/utils/subprocess.py +++ b/pipenv/patched/notpip/_internal/utils/subprocess.py @@ -254,7 +254,7 @@ def call_subprocess( def runner_with_spinner_message(message): - # type: (str) -> Callable + # type: (str) -> Callable[..., None] """Provide a subprocess_runner that shows a spinner message. Intended for use with for pep517's Pep517HookCaller. Thus, the runner has diff --git a/pipenv/patched/notpip/_internal/utils/temp_dir.py b/pipenv/patched/notpip/_internal/utils/temp_dir.py index 84bba3ac5c..abe9398833 100644 --- a/pipenv/patched/notpip/_internal/utils/temp_dir.py +++ b/pipenv/patched/notpip/_internal/utils/temp_dir.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. 
-# mypy: disallow-untyped-defs=False - from __future__ import absolute_import import errno @@ -8,6 +5,9 @@ import logging import os.path import tempfile +from contextlib import contextmanager + +from pipenv.patched.notpip._vendor.contextlib2 import ExitStack import warnings from pipenv.patched.notpip._internal.utils.misc import rmtree @@ -15,12 +15,70 @@ from pipenv.vendor.vistir.compat import finalize, ResourceWarning if MYPY_CHECK_RUNNING: - from typing import Optional + from typing import Any, Dict, Iterator, Optional, TypeVar + + _T = TypeVar('_T', bound='TempDirectory') logger = logging.getLogger(__name__) +_tempdir_manager = None # type: Optional[ExitStack] + + +@contextmanager +def global_tempdir_manager(): + # type: () -> Iterator[None] + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry(object): + """Manages temp directory behavior + """ + + def __init__(self): + # type: () -> None + self._should_delete = {} # type: Dict[str, bool] + + def set_delete(self, kind, value): + # type: (str, bool) -> None + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind): + # type: (str) -> bool + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry] + + +@contextmanager +def tempdir_registry(): + # type: () -> Iterator[TempDirectoryTypeRegistry] + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + class TempDirectory(object): """Helper class that owns and cleans up a temporary directory. @@ -46,14 +104,15 @@ def __init__( self, path=None, # type: Optional[str] delete=None, # type: Optional[bool] - kind="temp" + kind="temp", # type: str + globally_managed=False, # type: bool ): super(TempDirectory, self).__init__() - if path is None and delete is None: - # If we were not given an explicit directory, and we were not given - # an explicit delete option, then we'll default to deleting. - delete = True + # If we were given an explicit directory, resolve delete option now. + # Otherwise we wait until cleanup and see what tempdir_registry says. 
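+        # (A caller-supplied path is treated as caller-owned, so the safe
+        # default is to leave it on disk.)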
+ if path is not None and delete is None: + delete = False if path is None: path = self._create(kind) @@ -66,6 +125,10 @@ def __init__( if self._path: self._register_finalizer() + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + def _register_finalizer(self): if self.delete and self._path: self._finalizer = finalize( @@ -86,16 +149,27 @@ def path(self): return self._path def __repr__(self): + # type: () -> str return "<{} {!r}>".format(self.__class__.__name__, self.path) def __enter__(self): + # type: (_T) -> _T return self def __exit__(self, exc, value, tb): - if self.delete: + # type: (Any, Any, Any) -> None + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: self.cleanup() def _create(self, kind): + # type: (str) -> str """Create a temporary directory and store its path in self.path """ # We realpath here because some systems have their default tmpdir @@ -121,6 +195,7 @@ def _cleanup(cls, name, warn_message=None): warnings.warn(warn_message, ResourceWarning) def cleanup(self): + # type: () -> None """Remove the temporary directory created and reset state """ if getattr(self._finalizer, "detach", None) and self._finalizer.detach(): @@ -154,11 +229,13 @@ class AdjacentTempDirectory(TempDirectory): LEADING_CHARS = "-~.=%0123456789" def __init__(self, original, delete=None): + # type: (str, Optional[bool]) -> None self.original = original.rstrip('/\\') super(AdjacentTempDirectory, self).__init__(delete=delete) @classmethod def _generate_names(cls, name): + # type: (str) -> Iterator[str] """Generates a series of temporary names. The algorithm replaces the leading characters in the name @@ -182,6 +259,7 @@ def _generate_names(cls, name): yield new_name def _create(self, kind): + # type: (str) -> str root, name = os.path.split(self.original) for candidate in self._generate_names(name): path = os.path.join(root, candidate) @@ -201,4 +279,4 @@ def _create(self, kind): ) logger.debug("Created temporary directory: {}".format(path)) - return path \ No newline at end of file + return path diff --git a/pipenv/patched/notpip/_internal/utils/typing.py b/pipenv/patched/notpip/_internal/utils/typing.py index ec11da8aee..f133e34828 100644 --- a/pipenv/patched/notpip/_internal/utils/typing.py +++ b/pipenv/patched/notpip/_internal/utils/typing.py @@ -27,3 +27,12 @@ """ MYPY_CHECK_RUNNING = False + + +if MYPY_CHECK_RUNNING: + from typing import cast +else: + # typing's cast() is needed at runtime, but we don't want to import typing. + # Thus, we use a dummy no-op version, which we tell mypy to ignore. + def cast(type_, value): # type: ignore + return value diff --git a/pipenv/patched/notpip/_internal/utils/ui.py b/pipenv/patched/notpip/_internal/utils/ui.py index 78a960cfad..51c5cd3261 100644 --- a/pipenv/patched/notpip/_internal/utils/ui.py +++ b/pipenv/patched/notpip/_internal/utils/ui.py @@ -156,10 +156,10 @@ def pretty_eta(self): return "eta %s" % self.eta_td return "" - def iter(self, it, n=1): + def iter(self, it): for x in it: yield x - self.next(n) + self.next(len(x)) self.finish() @@ -279,7 +279,7 @@ def DownloadProgressProvider(progress_bar, max=None): @contextlib.contextmanager def hidden_cursor(file): - # type: (IO) -> Iterator[None] + # type: (IO[Any]) -> Iterator[None] # The Windows terminal does not support the hide/show cursor ANSI codes, # even via colorama. So don't even try. 
    if WINDOWS:
diff --git a/pipenv/patched/notpip/_internal/utils/virtualenv.py b/pipenv/patched/notpip/_internal/utils/virtualenv.py
index 380db1c328..1ac24fa60b 100644
--- a/pipenv/patched/notpip/_internal/utils/virtualenv.py
+++ b/pipenv/patched/notpip/_internal/utils/virtualenv.py
@@ -1,34 +1,115 @@
-import os.path
+from __future__ import absolute_import
+
+import logging
+import os
+import re
 import site
 import sys
 
+from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
+    r"include-system-site-packages\s*=\s*(?P<value>true|false)"
+)
+
+
+def _running_under_venv():
+    # type: () -> bool
+    """Checks if sys.base_prefix and sys.prefix match.
+
+    This handles PEP 405 compliant virtual environments.
+    """
+    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
+
+
+def _running_under_regular_virtualenv():
+    # type: () -> bool
+    """Checks if sys.real_prefix is set.
+
+    This handles virtual environments created with pypa's virtualenv.
+    """
+    # pypa/virtualenv case
+    return hasattr(sys, 'real_prefix')
+
 
 def running_under_virtualenv():
     # type: () -> bool
+    """Return True if we're running inside a virtualenv, False otherwise.
     """
-    Return True if we're running inside a virtualenv, False otherwise.
+    return _running_under_venv() or _running_under_regular_virtualenv()
 
+
+def _get_pyvenv_cfg_lines():
+    # type: () -> Optional[List[str]]
+    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
+
+    Returns None, if it could not read/access the file.
     """
-    if hasattr(sys, 'real_prefix'):
-        # pypa/virtualenv case
-        return True
-    elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
-        # PEP 405 venv
+    pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg')
+    try:
+        with open(pyvenv_cfg_file) as f:
+            return f.read().splitlines()  # avoids trailing newlines
+    except IOError:
+        return None
+
+
+def _no_global_under_venv():
+    # type: () -> bool
+    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
+
+    PEP 405 specifies that when system site-packages are not supposed to be
+    visible from a virtual environment, `pyvenv.cfg` must contain the following
+    line:
+
+        include-system-site-packages = false
+
+    Additionally, log a warning if accessing the file fails.
+    """
+    cfg_lines = _get_pyvenv_cfg_lines()
+    if cfg_lines is None:
+        # We're not in a "sane" venv, so assume there is no system
+        # site-packages access (since that's PEP 405's default state).
+        logger.warning(
+            "Could not access 'pyvenv.cfg' despite a virtual environment "
+            "being active. Assuming global site-packages is not accessible "
+            "in this environment."
+        )
         return True
+
+    for line in cfg_lines:
+        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
+        if match is not None and match.group('value') == 'false':
+            return True
     return False
 
 
-def virtualenv_no_global():
+def _no_global_under_regular_virtualenv():
     # type: () -> bool
+    """Check if "no-global-site-packages.txt" exists beside site.py
+
+    This mirrors logic in pypa/virtualenv for determining whether system
+    site-packages are visible in the virtual environment.
     """
-    Return True if in a venv and no system site packages.
- """ - # this mirrors the logic in virtualenv.py for locating the - # no-global-site-packages.txt file site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') - if running_under_virtualenv() and os.path.isfile(no_global_file): - return True - else: - return False + no_global_site_packages_file = os.path.join( + site_mod_dir, 'no-global-site-packages.txt', + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global(): + # type: () -> bool + """Returns a boolean, whether running in venv with no system site-packages. + """ + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + if _running_under_venv(): + return _no_global_under_venv() + + return False diff --git a/pipenv/patched/notpip/_internal/utils/wheel.py b/pipenv/patched/notpip/_internal/utils/wheel.py new file mode 100644 index 0000000000..822f3bdaae --- /dev/null +++ b/pipenv/patched/notpip/_internal/utils/wheel.py @@ -0,0 +1,225 @@ +"""Support functions for working with wheel files. +""" + +from __future__ import absolute_import + +import logging +from email.parser import Parser +from zipfile import ZipFile + +from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name +from pipenv.patched.notpip._vendor.pkg_resources import DistInfoDistribution +from pipenv.patched.notpip._vendor.six import PY2, ensure_str + +from pipenv.patched.notpip._internal.exceptions import UnsupportedWheel +from pipenv.patched.notpip._internal.utils.pkg_resources import DictMetadata +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from email.message import Message + from typing import Dict, Tuple + + from pipenv.patched.notpip._vendor.pkg_resources import Distribution + +if PY2: + from zipfile import BadZipfile as BadZipFile +else: + from zipfile import BadZipFile + + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelMetadata(DictMetadata): + """Metadata provider that maps metadata decoding exceptions to our + internal exception type. + """ + def __init__(self, metadata, wheel_name): + # type: (Dict[str, bytes], str) -> None + super(WheelMetadata, self).__init__(metadata) + self._wheel_name = wheel_name + + def get_metadata(self, name): + # type: (str) -> str + try: + return super(WheelMetadata, self).get_metadata(name) + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. + raise UnsupportedWheel( + "Error decoding metadata for {}: {}".format( + self._wheel_name, e + ) + ) + + +def pkg_resources_distribution_for_wheel(wheel_zip, name, location): + # type: (ZipFile, str, str) -> Distribution + """Get a pkg_resources distribution given a wheel. + + :raises UnsupportedWheel: on any errors + """ + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [ + p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir)) + ] + + metadata_text = {} # type: Dict[str, bytes] + for path in metadata_files: + # If a flag is set, namelist entries may be unicode in Python 2. + # We coerce them to native str type to match the types used in the rest + # of the code. This cannot fail because unicode can always be encoded + # with UTF-8. 
+ full_path = ensure_str(path) + _, metadata_name = full_path.split("/", 1) + + try: + metadata_text[metadata_name] = read_wheel_metadata_file( + wheel_zip, full_path + ) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + metadata = WheelMetadata(metadata_text, location) + + return DistInfoDistribution( + location=location, metadata=metadata, project_name=name + ) + + +def parse_wheel(wheel_zip, name): + # type: (ZipFile, str) -> Tuple[str, Message] + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source, name): + # type: (ZipFile, str) -> str + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. + """ + # Zip file path separators must be / + subdirs = list(set(p.split("/")[0] for p in source.namelist())) + + info_dirs = [s for s in subdirs if s.endswith('.dist-info')] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format( + ", ".join(info_dirs) + ) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + # Zip file paths can be unicode or str depending on the zip entry flags, + # so normalize it. + return ensure_str(info_dir) + + +def read_wheel_metadata_file(source, path): + # type: (ZipFile, str) -> bytes + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel( + "could not read {!r} file: {!r}".format(path, e) + ) + + +def wheel_metadata(source, dist_info_dir): + # type: (ZipFile, str) -> Message + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = "{}/WHEEL".format(dist_info_dir) + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = ensure_str(wheel_contents) + except UnicodeDecodeError as e: + raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e)) + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data): + # type: (Message) -> Tuple[int, ...] + """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. 
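+
+    For example, a WHEEL file containing "Wheel-Version: 1.0" parses to the
+    tuple (1, 0).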
+ """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split('.'))) + except ValueError: + raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version)) + + +def check_compatibility(version, name): + # type: (Tuple[int, ...], str) -> None + """Raises errors or warns if called with an incompatible Wheel-Version. + + Pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "%s's Wheel-Version (%s) is not compatible with this version " + "of pip" % (name, '.'.join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + 'Installing from a newer Wheel-Version (%s)', + '.'.join(map(str, version)), + ) diff --git a/pipenv/patched/notpip/_internal/vcs/git.py b/pipenv/patched/notpip/_internal/vcs/git.py index 6855afb225..ee30ce38ca 100644 --- a/pipenv/patched/notpip/_internal/vcs/git.py +++ b/pipenv/patched/notpip/_internal/vcs/git.py @@ -12,7 +12,7 @@ from pipenv.patched.notpip._vendor.six.moves.urllib import request as urllib_request from pipenv.patched.notpip._internal.exceptions import BadCommand -from pipenv.patched.notpip._internal.utils.misc import display_path +from pipenv.patched.notpip._internal.utils.misc import display_path, hide_url from pipenv.patched.notpip._internal.utils.subprocess import make_command from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -59,6 +59,23 @@ class Git(VersionControl): def get_base_rev_args(rev): return [rev] + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool( + self.get_revision_sha(dest, rev_options.rev)[0] + ) + return not is_tag_or_branch + def get_git_version(self): VERSION_PFX = 'git version ' version = self.run_command(['version'], show_stdout=False) diff --git a/pipenv/patched/notpip/_internal/vcs/versioncontrol.py b/pipenv/patched/notpip/_internal/vcs/versioncontrol.py index efe27c12b1..6e409c856a 100644 --- a/pipenv/patched/notpip/_internal/vcs/versioncontrol.py +++ b/pipenv/patched/notpip/_internal/vcs/versioncontrol.py @@ -1,8 +1,5 @@ """Handles all VCS (version control) support""" -# The following comment should be removed at some point in the future. 
-# mypy: disallow-untyped-defs=False
-
 from __future__ import absolute_import
 
 import errno
@@ -30,7 +27,8 @@
 
 if MYPY_CHECK_RUNNING:
     from typing import (
-        Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type, Union
+        Any, Dict, Iterable, Iterator, List, Mapping, Optional, Text, Tuple,
+        Type, Union
     )
     from pipenv.patched.notpip._internal.utils.ui import SpinnerInterface
     from pipenv.patched.notpip._internal.utils.misc import HiddenText
@@ -57,6 +55,7 @@ def is_url(name):
 
 
 def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
+    # type: (str, str, str, Optional[str]) -> str
     """
     Return the URL for a VCS requirement.
 
@@ -73,6 +72,7 @@ def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
 
 
 def find_path_to_setup_from_repo_root(location, repo_root):
+    # type: (str, str) -> Optional[str]
     """
     Find the path to `setup.py` by searching up the filesystem from
    `location`. Return the path to `setup.py` relative to `repo_root`.
@@ -134,6 +134,7 @@ def __init__(
         self.branch_name = None  # type: Optional[str]
 
     def __repr__(self):
+        # type: () -> str
         return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev)
 
     @property
@@ -190,6 +191,7 @@ def __init__(self):
         super(VcsSupport, self).__init__()
 
     def __iter__(self):
+        # type: () -> Iterator[str]
         return self._registry.__iter__()
 
     @property
@@ -237,6 +239,16 @@ def get_backend_for_dir(self, location):
                 return vcs_backend
         return None
 
+    def get_backend_for_scheme(self, scheme):
+        # type: (str) -> Optional[VersionControl]
+        """
+        Return a VersionControl object or None.
+        """
+        for vcs_backend in self._registry.values():
+            if scheme in vcs_backend.schemes:
+                return vcs_backend
+        return None
+
     def get_backend(self, name):
         # type: (str) -> Optional[VersionControl]
         """
@@ -261,6 +273,7 @@ class VersionControl(object):
 
     @classmethod
     def should_add_vcs_url_prefix(cls, remote_url):
+        # type: (str) -> bool
         """
         Return whether the vcs prefix (e.g. "git+") should be added to a
         repository's remote url when used in a requirement.
@@ -269,6 +282,7 @@ def should_add_vcs_url_prefix(cls, remote_url):
 
     @classmethod
     def get_subdirectory(cls, location):
+        # type: (str) -> Optional[str]
         """
         Return the path to setup.py, relative to the repo root.
         Return None if setup.py is in the repo root.
@@ -277,6 +291,7 @@ def get_subdirectory(cls, location):
 
     @classmethod
     def get_requirement_revision(cls, repo_dir):
+        # type: (str) -> str
         """
         Return the revision string that should be used in a requirement.
         """
@@ -284,6 +299,7 @@ def get_requirement_revision(cls, repo_dir):
 
     @classmethod
     def get_src_requirement(cls, repo_dir, project_name):
+        # type: (str, str) -> Optional[str]
         """
         Return the requirement string to use to redownload the files
         currently at the given repository directory.
@@ -311,6 +327,7 @@ def get_src_requirement(cls, repo_dir, project_name):
 
     @staticmethod
     def get_base_rev_args(rev):
+        # type: (str) -> List[str]
         """
         Return the base revision arguments for a vcs command.
 
@@ -319,6 +336,20 @@ def get_base_rev_args(rev):
         """
         raise NotImplementedError
 
+    def is_immutable_rev_checkout(self, url, dest):
+        # type: (str, str) -> bool
+        """
+        Return true if the commit hash checked out at dest matches
+        the revision in url.
+
+        Always return False, if the VCS does not support immutable commit
+        hashes.
+
+        This method does not check if there are local uncommitted changes
+        in dest after checkout, as pip currently has no use case for that.
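+
+        For example, Git's implementation above treats a checkout pinned to
+        a bare commit hash as immutable, and anything pinned to a branch or
+        tag as mutable.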
+ """ + return False + @classmethod def make_rev_options(cls, rev=None, extra_args=None): # type: (Optional[str], Optional[CommandArgs]) -> RevOptions @@ -353,6 +384,7 @@ def export(self, location, url): @classmethod def get_netloc_and_auth(cls, netloc, scheme): + # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]] """ Parse the repository URL's netloc, and return the new netloc to use along with auth information. @@ -470,6 +502,7 @@ def update(self, dest, url, rev_options): @classmethod def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool """ Return whether the id of the current commit equals the given name. @@ -586,6 +619,7 @@ def unpack(self, location, url): @classmethod def get_remote_url(cls, location): + # type: (str) -> str """ Return the url used at location @@ -596,6 +630,7 @@ def get_remote_url(cls, location): @classmethod def get_revision(cls, location): + # type: (str) -> str """ Return the current commit id of the files at the given location. """ @@ -612,7 +647,7 @@ def run_command( command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] spinner=None, # type: Optional[SpinnerInterface] - log_failed_cmd=True + log_failed_cmd=True # type: bool ): # type: (...) -> Text """ diff --git a/pipenv/patched/notpip/_internal/wheel_builder.py b/pipenv/patched/notpip/_internal/wheel_builder.py new file mode 100644 index 0000000000..f81e8081ff --- /dev/null +++ b/pipenv/patched/notpip/_internal/wheel_builder.py @@ -0,0 +1,305 @@ +"""Orchestrator for building wheels from InstallRequirements. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import os.path +import re +import shutil + +from pipenv.patched.notpip._internal.models.link import Link +from pipenv.patched.notpip._internal.operations.build.wheel import build_wheel_pep517 +from pipenv.patched.notpip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pipenv.patched.notpip._internal.utils.logging import indent_log +from pipenv.patched.notpip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_clean_args +from pipenv.patched.notpip._internal.utils.subprocess import call_subprocess +from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.urls import path_to_url +from pipenv.patched.notpip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Optional, Pattern, Tuple, + ) + + from pipenv.patched.notpip._internal.cache import WheelCache + from pipenv.patched.notpip._internal.req.req_install import InstallRequirement + + BinaryAllowedPredicate = Callable[[InstallRequirement], bool] + BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + +logger = logging.getLogger(__name__) + + +def _contains_egg_info( + s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): + # type: (str, Pattern[str]) -> bool + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req, # type: InstallRequirement + need_wheel, # type: bool + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) 
-> bool + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + 'Skipping %s, due to already being wheel.', req.name, + ) + return False + + if need_wheel: + # i.e. pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). + + if not req.use_pep517 and not is_wheel_installed(): + # we don't build legacy requirements if wheel is not installed + return False + + if req.editable or not req.source_dir: + return False + + if not check_binary_allowed(req): + logger.info( + "Skipping wheel build for %s, due to binaries " + "being disabled for it.", req.name, + ) + return False + + return True + + +def should_build_for_wheel_command( + req, # type: InstallRequirement +): + # type: (...) -> bool + return _should_build( + req, need_wheel=True, check_binary_allowed=_always_true + ) + + +def should_build_for_install_command( + req, # type: InstallRequirement + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + return _should_build( + req, need_wheel=False, check_binary_allowed=check_binary_allowed + ) + + +def _should_cache( + req, # type: InstallRequirement +): + # type: (...) -> Optional[bool] + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. + """ + if not should_build_for_install_command( + req, check_binary_allowed=_always_true + ): + # never cache if pip install would not have built + # (editable mode, etc) + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. + return False + + +def _get_cache_dir( + req, # type: InstallRequirement + wheel_cache, # type: WheelCache +): + # type: (...) -> str + """Return the persistent or temporary cache directory where the built + wheel need to be stored. + """ + cache_available = bool(wheel_cache.cache_dir) + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_): + # type: (Any) -> bool + return True + + +def _build_one( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> Optional[str] + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + return _build_one_inside_env( + req, output_dir, build_options, global_options + ) + + +def _build_one_inside_env( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) 
-> Optional[str] + with TempDirectory(kind="wheel") as temp_dir: + if req.use_pep517: + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + build_options=build_options, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info('Created wheel for %s: ' + 'filename=%s size=%d sha256=%s', + req.name, wheel_name, length, + wheel_hash.hexdigest()) + logger.info('Stored in directory: %s', output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req, global_options): + # type: (InstallRequirement, List[str]) -> bool + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info('Running setup.py clean for %s', req.name) + try: + call_subprocess(clean_args, cwd=req.source_dir) + return True + except Exception: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + +def build( + requirements, # type: Iterable[InstallRequirement] + wheel_cache, # type: WheelCache + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> BuildResult + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join(req.name for req in requirements), + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, cache_dir, build_options, global_options + ) + if wheel_file: + # Update the link for this. + req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_successes]), + ) + if build_failures: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failures]), + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/pipenv/patched/notpip/_vendor/README.rst b/pipenv/patched/notpip/_vendor/README.rst new file mode 100644 index 0000000000..38c306aab8 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/README.rst @@ -0,0 +1,151 @@ +Vendoring Policy +================ + +* Vendored libraries **MUST** not be modified except as required to + successfully vendor them. + +* Vendored libraries **MUST** be released copies of libraries available on + PyPI. + +* Vendored libraries **MUST** be accompanied with LICENSE files. + +* The versions of libraries vendored in pip **MUST** be reflected in + ``pip/_vendor/vendor.txt``. 
diff --git a/pipenv/patched/notpip/_vendor/README.rst b/pipenv/patched/notpip/_vendor/README.rst
new file mode 100644
index 0000000000..38c306aab8
--- /dev/null
+++ b/pipenv/patched/notpip/_vendor/README.rst
@@ -0,0 +1,151 @@
+Vendoring Policy
+================
+
+* Vendored libraries **MUST** not be modified except as required to
+  successfully vendor them.
+
+* Vendored libraries **MUST** be released copies of libraries available on
+  PyPI.
+
+* Vendored libraries **MUST** be accompanied with LICENSE files.
+
+* The versions of libraries vendored in pip **MUST** be reflected in
+  ``pip/_vendor/vendor.txt``.
+
+* Vendored libraries **MUST** function without any build steps such as
+  ``2to3`` or compilation of C code; in practice this limits them to
+  single-source 2.x/3.x, pure-Python code.
+
+* Any modifications made to libraries **MUST** be noted in
+  ``pip/_vendor/README.rst`` and their corresponding patches **MUST** be
+  included in ``tools/automation/vendoring/patches``.
+
+* Vendored libraries should have corresponding ``vendored()`` entries in
+  ``pip/_vendor/__init__.py``.
+
+Rationale
+---------
+
+Historically pip has not had any dependencies except for ``setuptools``
+itself, choosing instead to implement any functionality it needed rather
+than take on a dependency. However, starting with pip 1.5, we began to
+replace code that was implemented inside of pip with reusable libraries
+from PyPI. This brought the typical benefits of reusing libraries instead
+of reinventing the wheel: higher quality and more battle-tested code,
+centralization of bug fixes (particularly security-sensitive ones), and
+better/more features for less work.
+
+However, there are several issues with having dependencies in the
+traditional way (via ``install_requires``) for pip. These issues are:
+
+* **Fragility.** If pip depends on another library to function and, for
+  whatever reason, that library either isn't installed or an incompatible
+  version is installed, then pip ceases to function. This is of course true
+  for all Python applications, however for every application *except* pip
+  the way you fix it is by re-running pip. Obviously, when pip can't run,
+  you can't use pip to fix pip, so you're left having to resolve
+  dependencies manually and install them by hand.
+
+* **Making other libraries uninstallable.** One of pip's current
+  dependencies is the ``requests`` library, for which pip requires a fairly
+  recent version to run. If pip depended on ``requests`` in the traditional
+  manner, then we'd either have to maintain compatibility with every
+  ``requests`` version that has ever existed (and ever will), OR allow pip
+  to render certain versions of ``requests`` uninstallable. (The second
+  issue, although technically true for any Python application, is magnified
+  by pip's ubiquity; pip is installed by default in Python, in ``pyvenv``,
+  and in ``virtualenv``.)
+
+* **Security.** This might seem puzzling at first glance, since vendoring
+  has a tendency to complicate updating dependencies for security updates,
+  and that holds true for pip. However, given the *other* reasons for
+  avoiding dependencies, the alternative is for pip to reinvent the wheel
+  itself. That is what pip did historically, and it forced pip to
+  re-implement its own HTTPS verification routines as a workaround for the
+  Python standard library's lack of SSL validation, which resulted in
+  similar bugs in the validation routine in ``requests`` and ``urllib3``,
+  except that they had to be discovered and fixed independently. Even
+  though we're vendoring, reusing libraries keeps pip more secure by
+  relying on the great work of our dependencies, *and* allowing for faster,
+  easier security fixes by simply pulling in newer versions of
+  dependencies.
+
+* **Bootstrapping.** Currently most popular methods of installing pip rely
+  on pip's self-contained nature to install pip itself. These tools work by
+  bundling a copy of pip, adding it to ``sys.path``, and then executing
+  that copy of pip. This is done instead of implementing a "mini installer"
+  (to reduce duplication); pip already knows how to install a Python
+  package, and is far more battle-tested than any "mini installer" could
+  ever possibly be.
+
+Many downstream redistributors have policies against this kind of bundling,
+and instead opt to patch the software they distribute to debundle it and
+make it rely on the global versions of the software that they already have
+packaged (which may have its own patches applied to it). For the reasons
+above, we (the pip team) would prefer that pip *not* be debundled in this
+manner, and instead be left intact as it is now. The one exception to this
+is that it is acceptable to remove the
+``pip/_vendor/requests/cacert.pem`` file provided you ensure that the
+``ssl.get_default_verify_paths().cafile`` API returns the correct CA bundle
+for your system. This will ensure that pip will use your system-provided CA
+bundle instead of the copy bundled with pip.
+
+In the longer term, if someone has a *portable* solution to the above
+problems, other than the bundling method we currently use, that doesn't add
+unreasonable new problems, then we would be happy to consider it, and
+possibly switch to said method. This solution must function correctly
+across all of the situations in which we expect pip to be used and must not
+mandate some external mechanism such as OS packages.
+
+
+Modifications
+-------------
+
+* ``setuptools`` is completely stripped to only keep ``pkg_resources``
+* ``pkg_resources`` has been modified to import its dependencies from ``pip._vendor``
+* ``packaging`` has been modified to import its dependencies from ``pip._vendor``
+* ``html5lib`` has been modified to import six from ``pip._vendor`` and
+  to prefer importing from ``collections.abc`` instead of ``collections``.
+* ``CacheControl`` has been modified to import its dependencies from ``pip._vendor``
+* ``requests`` has been modified to import its other dependencies from ``pip._vendor``
+  and to *not* load ``simplejson`` (all platforms) and ``pyopenssl`` (Windows).
+
+
+Automatic Vendoring
+-------------------
+
+Vendoring is automated via the ``vendoring`` tool from the content of
+``pip/_vendor/vendor.txt`` and the different patches in
+``tools/automation/vendoring/patches``.
+Launch it via ``vendoring sync . -v`` (requires ``vendoring>=0.2.2``).
+
+
+Debundling
+----------
+
+As mentioned in the rationale, we, the pip team, would prefer it if pip was
+not debundled (other than optionally ``pip/_vendor/requests/cacert.pem``)
+and that pip was left intact. However, if you insist on doing so, we have a
+semi-supported method (that we don't test in our CI) that requires a bit of
+extra work on your end in order to solve the problems described above.
+
+1. Delete everything in ``pip/_vendor/`` **except** for
+   ``pip/_vendor/__init__.py``.
+
+2. Generate wheels for each of pip's dependencies (and any of their
+   dependencies) using your patched copies of these libraries. These must
+   be placed somewhere on the filesystem that pip can access
+   (``pip/_vendor`` is the default assumption).
+
+3. Modify ``pip/_vendor/__init__.py`` so that the ``DEBUNDLED`` variable is
+   ``True``.
+
+4. Upon installation, the ``INSTALLER`` file in pip's own ``dist-info``
+   directory should be set to something other than ``pip``, so that pip
+   can detect that it wasn't installed using itself.
+
+5. *(optional)* If you've placed the wheels in a location other than
+   ``pip/_vendor/``, then modify ``pip/_vendor/__init__.py`` so that the
+   ``WHEEL_DIR`` variable points to the location you've placed them.
+
+6. *(optional)* Update the ``pip_self_version_check`` logic to use the
+   appropriate logic for determining the latest available version of pip
+   and prompt the user with the correct upgrade message.
+
+Note that partial debundling is **NOT** supported. You need to prepare
+wheels for all dependencies for successful debundling.
diff --git a/pipenv/patched/notpip/_vendor/appdirs.py b/pipenv/patched/notpip/_vendor/appdirs.py
index 2bd3911028..3a52b75846 100644
--- a/pipenv/patched/notpip/_vendor/appdirs.py
+++ b/pipenv/patched/notpip/_vendor/appdirs.py
@@ -37,6 +37,10 @@
         # are actually checked for and the rest of the module expects
         # *sys.platform* style strings.
         system = 'linux2'
+elif sys.platform == 'cli' and os.name == 'nt':
+    # Detect Windows in IronPython to match pip._internal.utils.compat.WINDOWS
+    # Discussion:
+    system = 'win32'
 else:
     system = sys.platform
 
@@ -64,7 +68,7 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
         for a discussion of issues.
 
     Typical user data directories are:
-        Mac OS X:               ~/Library/Application Support/<AppName>
+        Mac OS X:               ~/Library/Application Support/<AppName>  # or ~/.config/<AppName>, if the other does not exist
         Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
         Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
         Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
@@ -88,6 +92,10 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
         path = os.path.expanduser('~/Library/Application Support/')
         if appname:
             path = os.path.join(path, appname)
+        if not os.path.isdir(path):
+            path = os.path.expanduser('~/.config/')
+            if appname:
+                path = os.path.join(path, appname)
     else:
         path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
         if appname:
@@ -150,7 +158,7 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
     if appname:
         if version:
             appname = os.path.join(appname, version)
-        pathlist = [os.sep.join([x, appname]) for x in pathlist]
+        pathlist = [os.path.join(x, appname) for x in pathlist]
 
     if multipath:
         path = os.pathsep.join(pathlist)
@@ -203,6 +211,8 @@ def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
     return path
 
 
+# for the discussion regarding site_config_dir locations
+# see
 def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
     r"""Return full path to the user-shared data dir for this application.
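The ``user_data_dir`` hunk above is a pipenv-specific behavior change: stock ``appdirs`` always uses ``~/Library/Application Support/<AppName>`` on macOS, while the patched copy falls back to ``~/.config/<AppName>`` when the preferred directory does not already exist. A minimal standalone sketch of the patched lookup order (the helper name ``_darwin_data_dir`` is illustrative, not part of the patch):

    import os

    def _darwin_data_dir(appname=None):
        # Prefer ~/Library/Application Support/<AppName>; fall back to
        # ~/.config/<AppName> when the preferred path does not exist yet.
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
        if not os.path.isdir(path):
            path = os.path.expanduser('~/.config/')
            if appname:
                path = os.path.join(path, appname)
        return path

Because the ``isdir`` check runs on the fully joined path, the fallback applies per application rather than globally.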
@@ -238,14 +248,17 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False)
         if appname and version:
             path = os.path.join(path, version)
     else:
-        # XDG default for $XDG_CONFIG_DIRS
+        # XDG default for $XDG_CONFIG_DIRS (missing or empty)
+        # see
         # only first, if multipath is False
-        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
-        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+        path = os.getenv('XDG_CONFIG_DIRS') or '/etc/xdg'
+        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) if x]
         if appname:
             if version:
                 appname = os.path.join(appname, version)
-            pathlist = [os.sep.join([x, appname]) for x in pathlist]
+            pathlist = [os.path.join(x, appname) for x in pathlist]
+            # always look in /etc directly as well
+            pathlist.append('/etc')
 
         if multipath:
             path = os.pathsep.join(pathlist)
@@ -291,6 +304,10 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
         if appauthor is None:
             appauthor = appname
         path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+        # When using Python 2, return paths as bytes on Windows like we do on
+        # other operating systems. See helper function docs for more details.
+        if not PY3 and isinstance(path, unicode):
+            path = _win_path_to_bytes(path)
         if appname:
             if appauthor is not False:
                 path = os.path.join(path, appauthor, appname)
@@ -567,6 +584,24 @@ def _get_win_folder_with_jna(csidl_name):
 
                 _get_win_folder = _get_win_folder_from_registry
 
+
+def _win_path_to_bytes(path):
+    """Encode Windows paths to bytes. Only used on Python 2.
+
+    Motivation is to be consistent with other operating systems where paths
+    are also returned as bytes. This avoids problems mixing bytes and Unicode
+    elsewhere in the codebase. For more details and discussion see
+    .
+
+    If encoding using ASCII and MBCS fails, return the original Unicode path.
+    """
+    for encoding in ('ASCII', 'MBCS'):
+        try:
+            return path.encode(encoding)
+        except (UnicodeEncodeError, LookupError):
+            pass
+    return path
+
+
 #---- self test code
 
 if __name__ == "__main__":
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol.pyi b/pipenv/patched/notpip/_vendor/cachecontrol.pyi
new file mode 100644
index 0000000000..636a66baca
--- /dev/null
+++ b/pipenv/patched/notpip/_vendor/cachecontrol.pyi
@@ -0,0 +1 @@
+from cachecontrol import *
\ No newline at end of file
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py b/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py
index 8fdee66ffe..a1bbbbe3bf 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py
@@ -4,7 +4,7 @@
 """
 __author__ = "Eric Larson"
 __email__ = "eric@ionrock.org"
-__version__ = "0.12.5"
+__version__ = "0.12.6"
 
 from .wrapper import CacheControl
 from .adapter import CacheControlAdapter
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py b/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py
index 2f2909988d..58efafffb9 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py
@@ -24,7 +24,7 @@ def __init__(
         **kw
     ):
         super(CacheControlAdapter, self).__init__(*args, **kw)
-        self.cache = cache or DictCache()
+        self.cache = DictCache() if cache is None else cache
         self.heuristic = heuristic
         self.cacheable_methods = cacheable_methods or ("GET",)
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/controller.py b/pipenv/patched/notpip/_vendor/cachecontrol/controller.py
index 0448910fbc..80bd030f0a 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/controller.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/controller.py
@@ -34,7 +34,7 @@ class CacheController(object):
     def __init__(
         self, cache=None, cache_etags=True, serializer=None, status_codes=None
     ):
-        self.cache = cache or DictCache()
+        self.cache = DictCache() if cache is None else cache
        self.cache_etags = cache_etags
        self.serializer = serializer or Serializer()
        self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
@@ -293,6 +293,15 @@ def cache_response(self, request, response, body=None, status_codes=None):
         if no_store:
             return
 
+        # https://tools.ietf.org/html/rfc7234#section-4.1:
+        # A Vary header field-value of "*" always fails to match.
+        # Storing such a response leads to a deserialization warning
+        # during cache lookup and is not allowed to ever be served,
+        # so storing it can be avoided.
+        if "*" in response_headers.get("vary", ""):
+            logger.debug('Response header has "Vary: *"')
+            return
+
         # If we've been given an etag, then keep the response
         if self.cache_etags and "etag" in response_headers:
             logger.debug("Caching due to etag")
if "*" in cached.get("vary", {}): return @@ -179,7 +181,7 @@ def _loads_v3(self, request, data): def _loads_v4(self, request, data): try: - cached = msgpack.loads(data, encoding="utf-8") + cached = msgpack.loads(data, raw=False) except ValueError: return diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py b/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py index 265bfc8bc1..d8e6fc6a9e 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py @@ -13,7 +13,7 @@ def CacheControl( cacheable_methods=None, ): - cache = cache or DictCache() + cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( cache, diff --git a/pipenv/patched/notpip/_vendor/certifi.pyi b/pipenv/patched/notpip/_vendor/certifi.pyi new file mode 100644 index 0000000000..e5c4d3d2af --- /dev/null +++ b/pipenv/patched/notpip/_vendor/certifi.pyi @@ -0,0 +1 @@ +from certifi import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/certifi/__init__.py b/pipenv/patched/notpip/_vendor/certifi/__init__.py index 8e358e4c8f..0d59a05630 100644 --- a/pipenv/patched/notpip/_vendor/certifi/__init__.py +++ b/pipenv/patched/notpip/_vendor/certifi/__init__.py @@ -1,3 +1,3 @@ from .core import where -__version__ = "2019.09.11" +__version__ = "2019.11.28" diff --git a/pipenv/patched/notpip/_vendor/certifi/cacert.pem b/pipenv/patched/notpip/_vendor/certifi/cacert.pem index 70fa91f618..a4758ef3af 100644 --- a/pipenv/patched/notpip/_vendor/certifi/cacert.pem +++ b/pipenv/patched/notpip/_vendor/certifi/cacert.pem @@ -4556,3 +4556,47 @@ L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG mpv0 -----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- diff --git a/pipenv/patched/notpip/_vendor/chardet.pyi b/pipenv/patched/notpip/_vendor/chardet.pyi new file mode 100644 index 0000000000..29e87e3315 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/chardet.pyi @@ -0,0 +1 @@ +from chardet import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/colorama.pyi b/pipenv/patched/notpip/_vendor/colorama.pyi new file mode 100644 index 0000000000..60a6c2541f --- /dev/null +++ b/pipenv/patched/notpip/_vendor/colorama.pyi @@ -0,0 +1 @@ +from colorama import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/colorama/__init__.py b/pipenv/patched/notpip/_vendor/colorama/__init__.py index 2a3bf47142..34c263cc8b 100644 --- a/pipenv/patched/notpip/_vendor/colorama/__init__.py +++ b/pipenv/patched/notpip/_vendor/colorama/__init__.py @@ -3,4 +3,4 @@ from .ansi import Fore, Back, Style, 
Cursor from .ansitowin32 import AnsiToWin32 -__version__ = '0.4.1' +__version__ = '0.4.3' diff --git a/pipenv/patched/notpip/_vendor/distlib.pyi b/pipenv/patched/notpip/_vendor/distlib.pyi new file mode 100644 index 0000000000..ea94b159a6 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib.pyi @@ -0,0 +1 @@ +from distlib import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/distlib/__init__.py b/pipenv/patched/notpip/_vendor/distlib/__init__.py index a2d70d475a..e19aebdc4c 100644 --- a/pipenv/patched/notpip/_vendor/distlib/__init__.py +++ b/pipenv/patched/notpip/_vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.2.9.post0' +__version__ = '0.3.0' class DistlibException(Exception): pass diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py b/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py index 1df3aba144..b470a373c8 100644 --- a/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py @@ -119,11 +119,9 @@ def _replacer(matchobj): #_expand_globals(_SCHEMES) - # FIXME don't rely on sys.version here, its format is an implementation detail - # of CPython, use sys.version_info or sys.hexversion -_PY_VERSION = sys.version.split()[0] -_PY_VERSION_SHORT = sys.version[:3] -_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] _PREFIX = os.path.normpath(sys.prefix) _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) _CONFIG_VARS = None diff --git a/pipenv/patched/notpip/_vendor/distlib/database.py b/pipenv/patched/notpip/_vendor/distlib/database.py index b13cdac92b..c16c0c8d9e 100644 --- a/pipenv/patched/notpip/_vendor/distlib/database.py +++ b/pipenv/patched/notpip/_vendor/distlib/database.py @@ -567,7 +567,7 @@ def __init__(self, path, metadata=None, env=None): p = os.path.join(path, 'top_level.txt') if os.path.exists(p): with open(p, 'rb') as f: - data = f.read() + data = f.read().decode('utf-8') self.modules = data.splitlines() def __repr__(self): diff --git a/pipenv/patched/notpip/_vendor/distlib/locators.py b/pipenv/patched/notpip/_vendor/distlib/locators.py index a7ed9469d8..12a1d06351 100644 --- a/pipenv/patched/notpip/_vendor/distlib/locators.py +++ b/pipenv/patched/notpip/_vendor/distlib/locators.py @@ -304,18 +304,25 @@ def same_project(name1, name2): def _get_digest(self, info): """ - Get a digest from a dictionary by looking at keys of the form - 'algo_digest'. + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. Returns a 2-tuple (algo, digest) if found, else None. Currently looks only for SHA256, then MD5. 
""" result = None - for algo in ('sha256', 'md5'): - key = '%s_digest' % algo - if key in info: - result = (algo, info[key]) - break + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break return result def _update_version_data(self, result, info): diff --git a/pipenv/patched/notpip/_vendor/distlib/scripts.py b/pipenv/patched/notpip/_vendor/distlib/scripts.py index 5965e241d6..5185974186 100644 --- a/pipenv/patched/notpip/_vendor/distlib/scripts.py +++ b/pipenv/patched/notpip/_vendor/distlib/scripts.py @@ -172,8 +172,16 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): if sys.platform.startswith('java'): # pragma: no cover executable = self._fix_jython_executable(executable) - # Normalise case for Windows - executable = os.path.normcase(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + # If the user didn't specify an executable, it may be necessary to # cater for executable paths with spaces (not uncommon on Windows) if enquote: @@ -285,9 +293,10 @@ def _make_script(self, entry, filenames, options=None): if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: - scriptnames.add('%s%s' % (name, sys.version[0])) + scriptnames.add('%s%s' % (name, sys.version_info[0])) if 'X.Y' in self.variants: - scriptnames.add('%s-%s' % (name, sys.version[:3])) + scriptnames.add('%s-%s.%s' % (name, sys.version_info[0], + sys.version_info[1])) if options and options.get('gui', False): ext = 'pyw' else: @@ -367,8 +376,12 @@ def _get_launcher(self, kind): # Issue 31: don't hardcode an absolute package name, but # determine it relative to the current package distlib_package = __name__.rsplit('.', 1)[0] - result = finder(distlib_package).find(name).bytes - return result + resource = finder(distlib_package).find(name) + if not resource: + msg = ('Unable to find resource %s in package %s' % (name, + distlib_package)) + raise ValueError(msg) + return resource.bytes # Public API follows diff --git a/pipenv/patched/notpip/_vendor/distlib/t32.exe b/pipenv/patched/notpip/_vendor/distlib/t32.exe index 5d5bce1f4a65f0bea9636a5a825646c520903df4..8932a18e4596952373a38c60b81b7116d4ef9ee8 100644 GIT binary patch delta 28024 zcmd_Te_YhX_CNmG1!UF5MFm9Umja5v_b2SKu&fD)2DXZVl%l56MwX_#W?gadE_B^i zN*gOHEeS1eKg}PhrG{dnrDbJBX+<}04R@{ZYDz`!_jzU)tb5<@&-eR%Jih;ZvGbZU zXU@!Pew;aT=FGgdnzLawSHjk3%Q`mXOmwY#{nUyRXP!&zhTERWN;`@8$Hl$V+C`Y1 z)*?bh+Up39?9im0MtI^(W7^|_t_<=UgJG@+r{B52PGw5}@n_4Iq#{|EnrOcNuMIRRgk0*Ey74Z2OA|z>U~BgNuiRI*~O;IhWArP$)s%JnE7|=ce$$-k~6iv7$`puP^AbsSbxVjhi6ZrtvlC2Gpdk z$uZV4>={4iI|6_reJrsRdB`e_{u;F29`RoWbD=c~B{6JzU+U~rVHbZsP@pJl??6fNcC!3l@q;OArMbT&ywKdd{6}AiC%=QZlM>hiz2aT}giw^1 zqyG|0?e?4rf#EOpI~Aq=Xa&X=DUJf)y=8W(FLmn`8@krUQ$;Ua@pe*4Pl_>!GGBL2 zuFvnZy&VZo>Q-O!ABsc`Zxrc_BF3^(UNISM#`u|!JJ0$;bXN> z4z)GP<48^S8nVGwb=Ci=4T%f+6^vv{XzDDHZ4a`QmskB9NVN0M0*MZh_(UM_N+5AD 
ze>0G{UL-z%#Asxd**_U*m0I#rXG;(+e{^04ds;DsYG!V6=1^~0rLqGw(WljKOy#Kg z^l3H>F$U5for=Wz+SE2sZGJ~;tbHfSF5}Ps>i4^kMH0oEmzrAAXh&!Jqm#i68al5C zSd~A3$5I*8je%%7qV~`cO6sqpD5ty}^vHnnIECSvhEee56kf7QZ>1|Iu(7>UWn0+v z-eczJk)?#s$581`)-_@7me_3|P2(9TrT#Cms(s<^W3iPOoPkve8=ufWD9qhsRAS#K zUK%bcuXMM_k%QR#5&MAt?5o~`ru>3t(-d$j+{fjWre4nPRr`8TdRs8Tq0lu|dU_$X zjE~1~mt1SLpF+7q{8iGdHQ&>;!KAnGM_6v3LE%yaHS=CJr_UfoA@CA@S2$bMXNcM^ z64bv?KPvGGs>g78kEQ!PvEn^GQMr+ktfkK=RkylE5BZ}*J09(u78!-{@TJx-mHYZr z?l+mC@37%eb){7pk1^=#N_P&azF`i9XYct=zuy(!dE{-%O{}OR;tFEc; z9OIn2(qVs!H z-)%{;r}smLY($4tVNroN%I5}}w5mHppO9KA+cSXGa*w0u?_MSz> z{G!ACd#RFT_H-3SrL=@=pwZmN)Fpg42LDw1i;y961g*qQ`Q_&Jk%O?#DD9DvsH8O! zqVD%pU(hvq=P1lyqam2GQT*csNotW|$WY)b64=UqLtT$U-FxyCNRHr7isV>KS>H)4 zc$FrzGnvMxt(yT9x5u%0MO zxojvk`2uRa5h?Ct@&8%n3-P(ZI^RKs1nNwdj$O55EU+UhVu~_;Opfr2sMYLrz0`Oi%dP~3jW|Yc1vVI zL;{Krc1%+odoFU>fU#MUq}ta(0|%XD;|W=8cz;W}4fU5wo%f^H@*`EXYiO**kPPBF z2t=@qPY1Jg{nxmtv63^Eud`sR68WduFz@`x!M`(!se8^5>TSll3!Cmn-z=ml-&yB( z4zE&Hn8NlLQR|2jz8+ z-Y+FjkeW}OI@QorN-8R<0%e5nmoSTbz0Aj}!@b8FDWxhD`qoOdYTQ;ptGF|xYJ-Q)=4;}Q4`R2+}Q6#aw9i&mIBmJM5T zeIAOxFV6e^=o3eN33DHboW>jSBx%i5rBs93`}(+#C&P5IqZ2FU1mq7^hPyMWI>y}; zY2%JO7{K0q<(k4k2Jqx7-A&2RIh5x{7STHMk!49a;*GIu<|+Lyzkl=*v_+IO9d$3C z(;UdVxP$eNN*6j=Kh z89IZe|vczC}893d7#t#@oq?0jYP3^yQbB$xRAxai_Pn?i_U4+GwVU8Ju(l zO~KnR=lYk3db|_N^Xm0cg7^fjKX4ZGC0XEMna#}7kBVdwk2k|{HbtuDMlUf zIPA;doG?YPaf3$4idpHPCGy!ttYy%xy}!5_eTc*^$>)w!gM=S^V;1}7?DW%Swo?!bX19cZ)@SEmbaHf>;6dDt#k4PK3Pg%zMq4Ln3 zFuJ#kweG)k=g-J5)cs1TNT64z?e{}pRGRV?&QM*`N#=?hICw0U*L|TAdb7Z)GVbc3=Wl7$X=RTp-HvV*pMC;aV;AZ?|vt_rEr zMqBh-YpnHb$lV+nMa2Lvm#KEG0fuK1ao7$7jW(E3o=`Lymmc>uR1lw8n8i zb_bByP98Ip?TXjQznQ_lkDsbuPowY=ED6$>3UohIOEg~$La>@DptKOB(^40Hl`Tq0 z>30$|(HBLqA}`QVf4x?eMMi)t$^kh>pfmhof?I?1u3E)`Z@6CIqdO- z@sS^39?PJ=7DYgN?3FAcski){3KpA`((epa?l?B- zRpFdxu*pf&BYv%rr23_@DtRgI^&{JnbW79>q&Jt*AMXp>2&}~h);D?MWVe=T+{sf> zwNhS)`rG&nR1W48>KEqz!0#L2Er^E->uclBU&M%2#!KbyN~J_==A34jN*z>zB*v4C zy+@<$=2D*1!A1^Eo%&G6ANuAZnk~lyWf?1UGXLA??uGn`bdoj{6=jd?kD~rcTcxWe z!&daBvu`Qf6j+Q_V*59=$dwNJ5Z$Gyisy4+=auu*X+c6HFA{{Aiy$D_f&0gZ9#&Kh z;Txdf%l*-g4y)9-nO4T97GfMO&^UauAjqZQcM%t#ihe-D6;-|YtwgAjaSheaeB1#8 zrf?F`8fZ8h?RePyhS>=GhFN&>ly|Kfxnt0%Dr+<>BZ-erh3W8y4llV(y8!LeJ<9dL~??r-2rEPtFBA zE7wIwsws#F6+uFqAcmb6L0YQ_#-2s6`H?UMSiJdCV;7>{{Ln@TacI6qCDH5JU@f$S z6(UWBe+`uAYV1NEwS;xx**jEfY6-gnOomA&R$otDEAlj~g)!l&I}g+!y_UMziZak` zkD-&xG4Y~nL9`#l#@HWS1Js>E3=+mr#_uH_JCz;qkHCUh1(x7?*vow}vLV*IyEEl2 zS@Eh_oC03T?`~&5qzxL;Dk4(@eGl2_>pmX2<~<44nQvHCxc#C?9^cMJ z51$ls_D+)QC`dES&SY>BQy*wOc+OwZm$ryXtYanS~jr zs(+W7zpa3^4j=AXG7b#b)7Nec5FrWrouz#JzaW|A!lv_v?!0PMx*bYeDgR!O3w#^A zo!A`t7E~E|42hFBLRf#Hufv#_YTt_0q&sWCE?S2kscRB6=RmxG|Nc*ukN6;TeEd$| z5WFP7N!AzWY~u+1ZEjen&EDz@Xonk9F1Q%Nj%$p9tOix%nv3aZ@gHm{Y}m@&zxf^E zB|IBipxnKk4Er0aJ4wgSMI{`A~7&yWG%h+X?>I;8x|I+`y zepL~(Q0vgs8;7xHM#d)6-U~14wZAh3#yPC7W}eke>Plf}M~)cPDZf>fV@V~SMIRZ7wUnr+^R3cjb3-7{vKJnU=s%$S8PKVC%+ z4c$rIM>@a%JM3!FkSb}qy)@ej*V5q^BuOlQGrfyb71O7|RTaheid9cJ=cq#J*$0>M zXY{1<&!9lz2U*33eG4ttlx*%0E0~H85OFKw^RUUH`7$9zx>lf6qH4~mC1v&5b+H&a zcdqmnIp3VgCH$onm;X33-hL&{wdUf=&_4>M@N#;cN*MayYFf$b>vv$?hTfs)p*DJMIVhf;;(10quZU-> zc!`6tHO_ThTiI_+lox)qpH6pBbz_9 zZt!F)2H#yzjYA7IA@J0R*42qt*NOJlbqYGQxBdIxv=2Yx=E;sJZXV*uaC3ztZFI9E z8ZlMf2_(DgiC@uvP~a|#gtqVONszT~1HMKu)j%ZMA49bF7Gi6E2xz7Imr>3!?v~KN zSa@AsoKB|2u>+=+6JR;tnL(JP<)HbP|d`_a| z;eRffQmN$rqGZtz?O~!!!v9z%#oPY{gMymhIoVO>?zB4!+?|UZusiQl)qQ}ZK&vER z?M-;V{kPcV929anNQX=Z#;v^(seyiqY=5E$Pj2@DhcD3Gnc+;Tbg!1{ow4rYp~39v ztzhUs2&*t zF2`y&pWuib-RyFdLpl_rn^kpRhk@Zpa*L2NDk&h}fqt;M4^avpiAyOa9;^>&ml3~iNX?|wFQcWOA5gq-aD 
z-RUkWg?haG5*BIqc8W^v-_wG9qn$RsbUv!zeuh{&m#QA=L=IJL8=mgYMXGg&1^o`y z!|gbQb9dh9xYylzk0agPxkOd>9MS?+u4$uJ zY8RjbgH3fPE4_6jX*gT(DFUdcSBp?bqUu+u1BKeK2HpW3CV2(n+7YRS>iPm0z2V-n zP{bNL&F_rE^7k-=I_ut{16ObM%%Qwv=Bx!Xoj|kTY*=t0~>P+um5|%Z`=YX-4 z-vdLS+*5aqeYhve)#~dLD1$ih9>Yc$A9$6gZauPiCv|!1HUbF_jeqYZL{x`GfA~eI zfq}I7_mV`_i(Dlz{9|DY`#<%c@gAZ%3`g#o@6jB|kr12{I3T3acKPAFFr)^V%gaU6 zsAaK`*Z_;*NbcdNF^_fj4-NvFGchouO4@rtKN5U=&6y7_Wd?VXt1B%SMM zi6@o*7BaTf5yz|%VE)r6L!qJ97lXA*J*(1FcM4oy!Rv0ChWPu4d$zRV***zO7$)RU z)fH3On1e@>9kJ9U?B@9?as#Kh{ykdc|FAQldfbQhvlsVex~Lf>JYKOJjXp$UIQkG#JrFU!e}~eU z>PrkL&8p;p*1U!P8;2o*HQQhA2(9zGzQS<`xV?uq(^J?B^`hf!^+No4Hhk%UURIQs zJH`jlQ0M&NUhx`VT$MUdxVQNhzVSbc|A+gKnBC61yuy;GJGXki_Yn2U4UK__f9K0+ zO1)4iWwx7n*c_NkNA^cYoF`79@p<~s?A!fgRDu$y+FhC8N<0wbqW1oGTi?F&pKWy? zItES{;QqaY@M|A~W4oZ0djlrSKFmQXj?Vwadmp2f$1AKb?}_%i&^j*1-aqJ8+WwQ@ zUvp^BZ&>A|_Wnqu2$lZ_yDFeLa0z2>1?>{v{*aua>tD25rHvM=bR3Y9Yn7G)Rsos; z?;))UNBxjZ+GK9Vq!#C^j~!{GBLnY|z`g<(F6vG?hE%obK4MG9_LkrA6Z^~9)B%Ab z6gnor9zyHR*<)m{k6kWr7{P{*OOmCsf^pfg*n@~$CNOd9IIKaNe?_Q2dTR%JYFx4W ztKqDD+-P~+ToyAvQGR+3%NU;`-!X^XHhx?Dx%a8|@Qm1KPnrG$y5mW*PC6TsN3tR6 zB)RnmmaUF8(OF6cTt0E|Z|n$EMYZO-lnQcqDIb4^c6oH*;+bUk8U*6J0M>0CtX@5) zm2l=5Bki8Q^mkvj`}*(B0e@t987#hY?(1F7 z0p9!$uQiksnNpL2{fJtVAxmd>XmrtgM6Fb{r6{*PpW_IN+W(p+T8=(CuE~#jGX?D` zz5tK%T_@GpCD*@hO=n5kc;wI47UEf_y?Z!SOjS37^v4N*&lrF7p6I10`%>zqq<^tk z-94^gH}v-w*mD9MVdLSa(Gi{sdx}tVMxvK78XzEqmWbzdSCB zh0EQGy0B#mZPb$(OjI-)MW}4dMyP9o|JdCbRvlUCEt(iv=~3pVgKYKT5SKj5IWn}g zI^vmM^M^#eladR!#7ZjS5M8So}!v3q^Gml z`r)^TaV_DoMC8+W_x$jMx)-&h=E4=I)B zH<}RR-vNbL<9+ZfVmav;A;%EYCVDfy^RY6B=o^&84}r0~yneNbX>Jz|B`1a6psyjiyCu?JtZ=rJ@?|HPCNozyirt$TJ8(UT)C+Kv z@K`wnw1Xzz=FHUSH$lS0F&nxFaaV-$3P2dOvtRjvAhs|No1H^F z;t0cAN{%yI*5Iff_3002jWRy`IIGSYKORkVM6L0!AB9i}AB8i#jE}8WCNk~hWD7(Z8 zr!TRI%mE>73`&!#_6Qn4qRC!2N0k%5*FeGy(^SZ7uzG=UZVXf}AfJta>WM=6cR0{M zA20Ccka*jhW|EkQw}0k8(+GJ<%uY(WAuk|;dkD$O7}%ZxSz)uYGv#ZKvB$FY@|O46 zd)ZTC)32f&A<1Bmw#On0=|9`SG&w_M9jqj0MBGDP`u(LmWeyEPKk*EKDk|j{Z)F>E zbS_ecj=}cz2>GK05N{OJ2fiYIG?B{r^eH$;SP3VueI6dX0a{30gql%WW`Ff@P)?)F z{j-pEcs3q5Pca`~uJVSX7&+!|%)6_VSOB+*fm(}l z0(%hur`}f2oHKRj9Cp8P~|n%7uH^4Nx8AT*NIr$`b*53 zJ5pY&Wq0Kcm$!e(SnkRZP1miksI2bJm}NgwJ-wmt90RGx(c2{x{xqhwjx5N<`4Eox zWUAV5P^&`tI3hp-A&xaLdyw!WD!b--7oMux3^3Wd{OIDS5a|E4Xi*t|`Xkn09q4)! zko=%el$B&Zg)BJvT<{Ss2XwzxscW*)AD@1X#{8^WlB}pl_KH!Z zyakc^(0me5?}^dxSA~4|i~Sf182x#Pjqk?lCH(fzL-vCp9;S-mtabPpk$|$D+x>e( zp|wC9&_ml`k4%4HOan>rpqQ%P(}W88@Yc(*du+kAhUcRn_9h#Ve(!ZHFE-IT^(dj0KL!IAYfu?TjU@3WH zh-+HjFshF~`g{j_Aa6+O`oqxMo#jw)BTQlYQTY`~=!c6aq5eUR?lptR4&)gI{cmYu z`KhCJmP4_1q6L2E7*oE|@qZ|)B!7fUc`LD#FbMw0{{uhj6GG-<2bN6dq2OU-q&;~6 z=APp-=wrH&fWwUqs45-ORE5m-RJWOP=(0`q4DU5@USf*h2_rao2BWfN@JN{E;$S<( z4?EBwwJKfk^_-w^G+$8YoaKQx!A$2V;|pTty3g3;g6!VU;s_lF!;9vzhYRKn%|Oh? zKb!~Jwbt?>P2|3%L(wR^mFk)PDN{{K7>^pM>fM-1qU8F}ohGBB!i(zf)fhQcggE7@g`8elpdzsWnK$JYoL2|sLT;j=Kr{iPsIfbc=pTP#}#X?w~m4M zo)?^Ry}cl}y@}HqV)K7|BmKLI+k(5Ze1%{C4i)Q{?;Uh(OJqgUWdIKD3#?fo*Ew+7=|k*Vkk5VXeP?ADb|F zVEk~D)?C$xj|}z>4oq;QP+CtfXGCpZ^y6@9fGO}-glV9 z{n+8seTQ=XGceqDu!;K)za3-0P8p(nFPNIfk9^36-aIhkWe|e*8*cfKO}M#4-qFDx zzj>6a`-JP%wECh{#po06Z&V>ihMeqsq`dvsJLzt8kQ&}i%>z{&|4hR@NZdzE!Oql^ zkMk~N`HsKhO4J9~R<=kW!thfj(|N~u=%^i5M+Dy^l7=D4_ic&p+zy!|hGi6~)2E>O z#fgza$t?n>)1nBR0P>Ga5KHJx>}Rv8*uzERA>rk7 zMX4;*c5k2CFxTD5xR3fibJ^mGz`HDoYe4kG@<9j!t>kZFdr8fl3a_U3IMizy^ehgw zYLLMj4qf|Dks4kv8LpRI0V^_pahmdWvR{g8W#BXRF7#FI}q zy7(oySiG&!QXOzqC31_%p(ObzIL+2gi<3XGi0zp6y!=WBvrXTlyn>m4KIy4{@Rt#xuHJBhjVm2AlDayM*9D1HHwuUV>ifvsj9wN! 
z%R|(erN$}TNE4jXyB?j_U67p{6}m+f}?h|g`F=QJnDAjX+||l zc-TTxfI_k{v(fx${*qu9lX+?jODaoMJ^)JKeqdq?yQOTfrB5yy7D`_Wgq1D{DR??b z{xPMyI#P1HWpXN)_v32HHLTpD@Luxms*0Hf4Ih|xY-`!j-e@egBJEcfvUkgD?EF*1 z~h$V*#~7~S@PDw?1?!uWe>9-<~$f7JgRM|fQ{cZhSgLolYcduU8(q5 zp7uTaWUfYzBY4HU3G&!+?B;o+ee;s!$>Vl!npY*0UmL|jZ@&*^uDX4@>```P zdo0UZ5G!w*#AYlQB{%%clAa&MUR;nQfBiQ0-hy%R!IRi;3&zNO1#INPaiOy@5!h`D z6XaW-VGk^v=5lWqe!icaR_Vv(Rw>Z|{~q83;A6mG&?m33{_(i_@tGKAG6`_s2g`F% zQ{n1cF|Jof(hw@K!RUni3Ok7Q2_($4Fp~jl)I8Sj>7ne(D}&-DWAS(AE97pAzuc*E=Z-=PbG^-6bBLwAN}jsO zug)C$5c-f$!5SPm%ob8*Ml>i@U+rUWzq)+L&faKa;C$IDyt;Qa^;W%6t3K0v0xN!P zsQex8?xnBA$>r}JVC!DLi1EFDmo9X}P-?}8Ki4tbwltmr_p{H6`;o&yU8 z0qp?8U#-#vz~2C?k;crGi3`rY+|vd30Y!)SUG3jta?`ShPKx8ZZg9-(;b;a&+1tcy z{(40;fA|KE259>uNTy?(;tu}mdUmjKLBDB5q%UyC*BQ^{FSD_C7;X;SZ&%fA0JHc+ zF^=zlsi!u#0S(lq7U|w%CH&K0!xs0uOwPy^QM~>JtNJDO${i0!E!1M`33Ip0+p7%z zFPk?<-@$IN$H-s*h}~fyDwm4bL-yhQEDuAneLk3eUIQP;4%;=Z5im!v@W2O2R}!)K zgpuSgqI9gk27HZx_u{}ir@Vfp%rOj~67nmsG>{<#vt>dlzWAEC6*5S>H|Jz`b7;MO zHP|KRJAP3Ff2JO(X;hlzPJzFIZa~V=EL~IiZF4KeG{e<|7xR^ZjZkq-^+NYP@V6cK zmV}&b(b6UM)SY8oeew3drX&KyE}F(Ib8o9?#_bj^4W*t|`o8PAy@HFt&b)v=aM&7X z!FFGlIeTBNp`MlDCfIekGybYI4~9yWoKFF3l?-TrgX%Yas%Pe7)5whU6U)Y z=#uh)#=sVJ!$b@b83lh*6XnODCvf8Iz@6mtXoAou&@kdcWTgGX05thB=sj9v8zQtm zdJy)gtahqp-MN(tGq!qS#jG9{t&1Xx@G8B!)SQsz{_bLzJa_mwP<>`Yq^xa+`Rjf zyYghh@8S&OA3_B|p6Ie9J~G6-i6vW!Bs)9DA)8dn*Yw9b>m9vhbhz&PS5@ewGJXUG z4($p(IV8I1r}qLSl=yU|+=dJ_ByqDy;_%HV12=fdBa>T-&Hl`IjB}X~xk>QPgj}wH zz@6}mI`CW%2^>{Na*y~gfrGr!8~I?tMd+X9ZSywv)7|Crfs@&kd%l&AD`exB43+mS zWYd>S84bxrdvAFw_&wF($Sm?~tWAhA)9xe`u9pV7Jc|9imbEO&lOKPVMJ!ECrJJ7~ zaSIfiCEBs#-c`?ieq~8Ov}Z0IE>~P))k_oPneVdAOOwZGA4H>ZPZghL3MtR;^3i9S zgp(YJB1-p5krSlDc(BNO2e=)p4JwC9_s6p>BL_^ZxW_*{@$qzF{Wx7FRr&O2fU z-iy;*Z!hl|>?@A^8s?3949=EtXA%st5zeuZ%gN(SUJUbZ&cXbvEo`APWn2#SC;>eg zSLv>9jdKq4w%{P#pTR#ODFkz~<0zbo5-3sM?%wO{EgSfgo>C(F9(A0;&1@umrDq?m zh?So^&3r4S4Z62GSyej)ofKf1t!E|ojgi0lG+T3DynNI*?AiNb<&od81NSA$??_{x z-!}}k?7D9#o{1}sc$Te9lNWWc)hiQS@el%xN6*IKg{RPvUicyg&HN?ItE?v#ihWdl zo-DK``8p~TYryPoZ-6O@-hKZDQ(%NVq5ZrViJ8CRds6!2>z;%@hxdaIAf=DeyT1c; z#^M9If#O5JfxIsg0^;=AlWg4m^W>(d*!ufZRCxJ~H}5g9qxV;Y2}7GDJ@DjU#Xx-7 zE$(qPV5lUU`yze#O!oDM54_aJ*Z@@q^Zooy?7_bbmpkugul_~X&kAbr+9xso^p_QB zL~=%YrzySDm8NO<6n(!8r8SpKd#eM+jjyjj*h8y^xgeDEae<02d%Rm~lVi262^t&D zI%wJdJNT4DqunQDx|61`c&Jo-2#qs(R_}S3J@}GA{*U|Ep_dlO zVb~0N*&y%BSUCd3)+1HK36AH&}hp_(^~P|x2IVLab0LL+}kgxS0iq5GKC?^Bh)QWI)UXY1$v6myMc;2pP|@Xk+7f8 z83Ju3bh1Ft6KWA?8=*RZULZ7Gpd6@+CkXr^kp~F$ETQ27?I5&k3H87gLN5ullhE%3 z+C}I&fl5#;{ER?D34KSP3PRrmD(bAH*vldzlF)`FE-E{U@W%x?mQb%i;|Z-1XfmNr zfu<5#CD3$2=L&QTp)&-kCUmku^@LgkY9v%A&}>4}1!|R{|9OJI^N2h^poN5n0~M7n zqS!C@P(^Ko{wUBHgnlc~S%kI83gDaXh*!b)@TaawLK0tx~w+1PH2~gDLFnf@4r=YCRn+=NvrIeQu<y+GLCR+V zN~$?*c98N;fMPUORw9Ma7-7vQqc=D))CP z^Yx0k&aig78n=R5LeN>xs3Yxi_di11zbKZCyu96?`k1tft$r{jG3s?J{i*elyR6cB zgc}iEpNfYK;CP+wdT@jsoaY`)r9k$#_%sQea3fqp`USvwz*)d4z%jrBjFE85LeYX`?{MIvI9uzn}-vw*H$%(6B~zIivB zyEdHy&sudFap3JX1hWA10CvDqz)C<3fbC{qtu2fSyyRsN_5h9nT6eQ7Pbo`&XreM5 z<`BP73*XPo2Dap(Fb5<_)r6+3Iy;EZ_f?Hq7)>5NLIy6*aig35|I-qWDRdz)5|X-$2Lq zbfp64^nBe=dEIE%w|oo~d{PmDA-FSFn)mM-2Nq&mhQ@yl)X`u)MN5M^Ln zo+IvOd`7><5?bp2tg#klqbB5|?B4lRh2n5{rYMK58U@CZ@(X5c!^5g#XH^Vq#xBr-(H&8^@mLvRIWLnJ4S+tQil1@c=9g-8Qef`pcGXgMd5c|6p6SKV`g_h zk`h)sA7&Wqmpn*$50<@vZW`cX`yVNg$GaX~Z&H4|Ln&q!|%crkq@2(%73giuX$SClu$Ks_AV?^n1Yj#Qz3@7yEs3 zdfRv=Vlm~oz4Z^i8R6ly}DtHTw{AmQi=39W0itzS=R0tp1^^*E{y0_5y zsLohMsmT)0zLDYUzre$q0Pd@KMw|(%=;96CFnTgdY?Dj*I$Yml{n#LR(i{vb#e+;7I+94G%3LM7A+GbUYzx8^@FHrF7#6d20uAZ7idpb>lqw%PB1D zvC(o@3cKU6FNs+2cq;|TPrOeVI-VF$v7wuyo%j?1@doR8KpgZ1o 
z{^4BVXpr~0R1m_V0?c%D`QPhMnn|fV4dlt!v)t7Py+73h5gArh}Z9+*DnM; z`1*ld-jj_+tO0fYgJW_4|JKn{fd_&5zPNFOhnaaf;0^GGW2@T>TV0vs{SrPa5-)=q zvfvQ)D#$X2ri7ovRY&r+sv+RentcOuN{4&61bYF_@(Z}@#XWchE){zee6fQ(oGK1N zpb1_o73U)vvG!MB_hE-_@jLs`$%xAP-p*V;{S`L*>0#KTxt^XmbPu|lJC=)+k>7*o zZHo{M%BNyE`}yfK#eML%|JQk2<>uS}bYxP05CsH|OcZov^6lmnx$48+iY>A7r0Orl z%zUWsC-9UzhI#AH0IA=59#6jH?=1J(WO?!5+5Be@i=()EB;i8G-$AWx+@8RP`Kz$#2>DO=S zvxUFu5u`i2pWkT^SIjhT zro`)MDG2w(_tG`p=8NI87j-w5FI>cyzL`7*R`c=_Fa4s9_Y&Ih$BPSrI%l}E)qaOs zlz8yXL|33hx3U+mwECj?r&yhWy5%dNaQF%r_6>`Od|ztF=DzCSs`;`lQ` z9(NB99}d*_NuZ?x_hKPA8{6=ECkpiEX>X`P-1*@&&<$Y556p-=ZsN9E_eHlJ6A@Q| zDilrwF7iCHo;AD`L-*Payfub=#`SqdI{CFj>gb2RZbwaMD4 z4?uU~LkMr^AuM!j4<;r)K|H#16LEg$UX9-?9U^XD)fdnkQT<~FQ!=U2R}0V|LvRE@ zwQwJ|`rO{?RK`Oa3h4OadAF-VIary$q%z}P4V*VORT6Vtsfl=`clh~0)@YuxtfL# zAkvXe`L$NsH6I@xwph#dHI0rs{vc}Dfz~>^U^UTibhNBtSDP#m-U_N*3@x?eD%gZ0 zgXPO7u-Qk3OrnCY$Hmjxhi72LXcUy=&XwXvJaE6mp~FvmjOT0iAdSv3`O~{e3Ez(& zN)d=-Di5h(hmRzWud72#8tG68Ill0yO@ZHe8CVs{t3*RdENwO zIXWWUvI|*5nr%D_jC3+)zV5gcTLs!J3=5`}@rO%T{n3#lXF~uWXDWB`6+iEq-QDeF z{B?9a2GD?U%htY= zD1Y+bY{xreA>yCD^SZ@Y-Xj9%W|2tKuSG=6hUxFYPhSSrS@6!AZ38v4`_9{7-ZhKe ze`1LI>sjpa6M5;&a#6)5`~U*3vsDVb=Q`W3M=%0tGp@7U{%r5x z$Lnm-C1klyf$y5c-ptn4L0(L)tJWS?#`XiDvhoUNmZvk3#pY{+obZB?0-8nl^ z^mQC>l4r4jB5tNT5vQ~YJIwKqO025v^vS`NL%5<8{N?XZ4aOc&&nCMa@LhLrAon2OG0MQW+Bs#)agj;{Z?Jk5igxE<+ZNFNj zAqbZuq#aW-LR>47+J3Q0NeE*RCL`S0X{Bo*qot;+aF>Cw72*8|YY{$x(2lSR;XH(X zgx0IOE6#n;$0*I09Dv0rWs$TD|CRufr8}j2@LY@(C$I%D*C;g$v@~fswP^Rq?=odG zQIIqf4D+Si!Mq5`4iu1#49gIsqKURt6uSWUa{Nn1k;}yk?f^al1ujN>M6ht+tnT|; zcDH_?6OuG+g4Cgc4-0RZUXoWb{c_Xu`>MtkuIZdHBjYXUO{Vu{QzweQ^(OQJ&fu&K z8-~WP^}g7UO+&2A>q`iEG(Hf{8XgGOj0l9Uj0%JY#ssEWonL0CViSEbaMR;K!4aKFz=Z_vIiWfecf z$yeS+WyvSrPGzW0FR+Sg)8oknR`!T*u-vvta75iJg553={A)e&%X6N^yW)EJwxZmv zg!Y{WXXZix{@C0f28R^C5vbL>`$WFI%_4aCn5c5oMJyQnr@kkG_3p0!=br}B zsnlrTi|M2z_&1v-PlqJ!fhy|$+YDOaCG`0J-<1{oz5m}UD;9U@Xk>+~V9Bz(=R1lQ zEnhzWjzwj=zqmBK*Y0_pQ)GRb;P|A!rh?slI+GN#)W=52uDc33 zKsq295DQQOBtYjCt8@{-0T%#mfb)P>z*)cZRUjgP zpa!rQun;gCFauC@c@bI;P;8)B=_Q zW&>sbiU3AHB%te(^@+DbWYO%5yznOaGWU;v|L@~Zj0%wr)6(%;_umfL1WDa%g0wSy zf<)g5(`TBPsp9YDkN{rO2dk2W)%+T|uPH>9EO+6ur=$i@<`=<~9L8uV>G{)r19S8L z0d!};kGP5dFNGQvOE&-Cf>Ck>|MbX^iVXi7P)ljd=>-V)J<&_{54r1hnZ&WQUVuQ* zU&lh3MCYa5@z_e(4_I2e<4yPA3%0Nu@pK2$({9AiJc)SZjrhtJ!H=@L^S8ZX+2V)ECs()Jd9=nh6?()~>x@`<_;*i`R|RmFVW` zmgu(Y-qan}eXYBwlk1c88Tw-VQoU3Eg#KmyN&R>FpYq>fY%QV&;a)w${gYNy(*-lTp`eM0@4x{qd{CR0nRjO{%e*hMI&)3t-!j)_ zJ_@;fKJ#Ga(ad)01cuucpwf-dSl`s{F|W$(^Dklm8~Tec!6A!lmNT{-)5{+Xk+47Ma$CR>(Zyq>jOu=p%e?xfslxvO$F z=I+Y$G!JW@((Klp)qJT5 z)%MrMXj8R*?J(U~olWE{yx1|zd`>LWZ^@7f5Rel<8jPY zZ)S(_nlUUZK5JLjw^{b=#GLe;^Etkp+fli^+^M-g=ECNc3Q?Ic>I(IP>Nd^&+IU@- zZXSM{@B`hybpDK=Gd-qLrcTq~tl!NuE#;Oum`#f;cUtbY-0QMDZ`p16*z$uV3GI_= zuo*x>8g;xzkLfgDzunN5k&<~!=DnG>n%*&=G@mwqX#T|fx%o@;H)bXJ=^7-u(Xz|( zGy11b?vUK!xi{sm&V4zzJ@+!{f%VuEAm|HWt5iR%zEiVAb5#4GwzqDeZUz+EojTV8 zx;kCHq0~@eSSUp2oZ%b8?}oIDdokJ8VY2PY*q8BThLjnRIXH7DgvXFsky(lA3^6@z z>XkJ(%a}DKYfjdZtk<$W$l8{3Jjb6i+)`k%S?;i`v%F@RO_kV+9Ss7z`fjyLy+!@H zdY^iXW`kynCR#h*r8PpyZq~k`{Y?9d_9mTIm#x1^U#Op@U!Z@?5R#FYc}J#XJY$?< zx*M;%loMi!wv4ctEORaQTHKb$EH79NSl+XIY5B<_%N>}TmYYd0+$o9O&ztIt>QGHz zXu4QUye1hcYKf*^vr)52vstqhN@|DZWsPg6=2wlR4b>{NO6@XjwRV&CS?xCM4(-d@ z)7op=kvg?buQTehbyi)Tu25H`dtA3!w^g@Iw?j8k|A4+mU#DmKP5LeR=k&Yu-{`;B z|D?|_m<>+D1BNw*2MzUxM-5vIvWz|%kr|sZUO-X$UzX0+5&DT( zw!#fB8crB~G`OZiMXb-*VR|-8oxME!St!XyQjce_<3*rC=MB;f(Ijb7G$S;lHP52U z-qC!kNzzV&7^Ubkbdz+8buVC5{7Cn2UB3QqD5U40NxSd@gJGhf$gmFU-xmhIL7Ab? 
zsLZ%0L4ncFkpFrGEuZt|E8xlE@`k7PZT)sXdK)}gGfEQNWHImP_GIW0RY`_Al# zv)ARY9F-*oqy3EKqGfNc)P`*rDt1zRMm=5oBnImh?VH-qwL5ih=-$#DK{aCZv~8j3 zxKLx)EY>X5I5ahyTFpUCljfM^l;y1DJoL#$%N0wPB{cVmrunj%#S3#6lia{Qp+n?# zx$ASKNSQPXc)Ge!ZC9^V@4%|b)e23jCLNl^u2~D|IRVYmq3P1ZLWXB)7h+l4s%_G? zX(g;*$wFl2L5pvN8feo=P%~rnC@}d6%k8q(EXfjTQCO6gNK2F@))H??wxn8&mTZgF ql4mKj6k!6*u*|Z|w%9F;ElVv9%Sy{COO2)0vevS1+5*|iUjG;5p}WZd delta 25283 zcmd_SdtB7T_dovH7YnQkf(pw0CKxK}!d_t6s|JXLx{5B6H!8Jlv%Kw^l?uKW%yp&I z(bTfi(6X|!yjG?tCR%1%W|UTFWMgYZWr|6@e$Sa*u->2V8eDZIc%?zsw#s;C#ro5u2cO@Ox~Kz2-+Fh^H;6Y5TE6IS9v)ouIS;*y zUPt)Fv3C}Ih4B22ltoK8{pkSz`+;yj563=uznzu2Dm7+Rt72?y_qs_!;ICN_W!+?5 z1>qP7FZhYy3`Q6UXl8MX9~T87SdcM7XDHMoE*FGnHj=w!{kpFVk_iTuZj}jJtgBLc zh3$|D)-Hmu@`4~FtV)gV-E@l=$9VjI&4MsawkkEQ=V0gb1y0bqHsZmmK1TkMC3T-L z5NIVseDjGa@`pjAP3&Q^HNI+9~wtIHI zAl@TeEe{Dn!#7v=B=hC{)X~EQ!F?pEwx<22Bw6iQe^~@th+OV^i6y#adh9cTn?yWJr6dRiBx=Q!;VzH~yO3PA6SFNUvIKm>7Rc~(hP66eLGO#!jRPNKlS0&bAG8Oc|9bh5=p`geGg}7vA$v6 zFTNm8VJmCs{c(ceEo?2HI^}r7c<&YWl`xxQu;fyxu6bN-bdN$1%FEHCi8i}^&``n7 z55{EWj4AEuZl6(A9bDa1*xtTu?FAGnl#mW)Zbp4w^fRcLQCBJ1c&I5 zIRqOk$|^+{Fd=EJx#kL!q7BWK34*&V#3d_2#>xCe7byf+&&_ttQ1%jC90IZ=Ad9Nr z;!kgBTXN+*Bo@LsR8Sr9RHru~<}Fl|+R9W?S(&FGJwb|AHDT1RMp#0U^uLbR1;F|SMb_a>VsrE3Z~d<3)0nPR=RLBd%-A=Gd4@8x@p69qi?sx7k^Xsk0zxzfUF08>%Ads`?q85cg5Bj818nq|)n6_HigCUUCQ%mi)-X7<11`58RL>_mRjM zZhe}oAF{YjX{8Zs>g~&Tm79?2FXu=YigGuI?ncoOVLye`Qus(sJss0!KGn|Bn$M$LNNS?uy{b?VD6Jz+0d!lZ!_Vq=As$N&qgtpl&An|bv2h|ZV zv9yej1)T_u;L0VEDO8Opmu3D>sM(vYe|`X2U2aeQu-QHk^6|Ul#WN|p1AxP zgiRmL)P+m@qv7(#C@dLa_LUM=u~*~CFWq~G7sU%gqtF|AgkFv#Jt9&^b%|jsi9+A# z3a4Q_6$HU|94plfo!*r~uR}lkvp8FzDIdq&B<-Sy%2_@RQcr#Cz$7W%h%wkwp5zXNOG& z#M?7AJO}+5;QyizdAsM5O7>!cD~7%|1YMCt<*>5s5*v~z6a+y&=ahRJu-w4txZMvG zYyz~^NUna4uqnFW?U`s{cqu)AGO#?lUiF3260EuC*Fqk68>de3MU~R$83WA*tx$+d#dVf=$E7SksilUFQNz3O)Jh01IY*~f1LkqOT z*?X+|5Z=~%%zY%rGqO$pv8xaL3l>VFFo22AuRy)R66s^ZRo?}{?2w^iG)6=9ob69kPrHd6-#qTj~He6N@&5r z98mQnqhjumS;+jDS>kV zro=2g$hWZ>eZh?hPaCn^1`2fmRn^nN9$fLn(8;7228=6+hu$aLAJ-m_k16kwA?ef{o3#_i)&UHy~{|xic3chGvqqQ0{?<*EV^=X%4zu4zt45IEM59bY?o7-T%fiXR#QX$w1y&Dl(4k8q z*9w&KD(7P`f+4YObZ0U7EKwycE+(N#qm->|6b?fHVL2^Sp!=a=qUj6}0?q6K6ix7D zvV}wXlhh>j?fx!kye|r_p)bh(lp&E{z&Ma$%1)0I%58LUCb^lS%xKT!@>7Q0o9Z}3 z!`&uDtUXjaRDuq`0vIVkrF>4TsX9@*O1!E4D|dDa6l}r*8|Y-Wu?nh3uz5ApppaNc z`yMUBsxnxonFNCK0s8S>f>1|c%+Mc3uqEcPE_6IcTzC63)m(E91kK z<`8W(w3VDp8y$5Q(i>n>mzB|OT{!E)Op@Jq&4_n0ShL&cCbZ8+$7uyDkEc*0Y)dpU z*!`*G>**~@z#Q&orLX(~qjN@rQ0_+Ovqge;k(D(dnsiUk>ynDH8*EhnI~kpxK5E(T z|LCAAcy|$+`r#C=l<1zMj+N^umi%(4Hb<0wHY;iY3=`Egon{ld)YZ*~T`07&vzZ)9 zFR3g;=VSC0js!Xxl3z~Su|je}BrV~D)1xp+u+tihdhiAcM?9@hL_LO<|qU8si<95PzL+NVG2#Oho*pV2n6F|Dh>!zyXUA&@jy zJn0S}v&q9eh0Quwiz~!RZzVvpW+01iu($AvSK;mA8F?Oi{RB3o5S{7NTX-Jq?nNyE z<8NX7a_|QxwzsgEK8XEB11(02FR#6YRfMlgORi)F;T77FF@wUdvS)NFdnR6CPYq>H zV+)=$RzydpupnH{gT!V|3_j0;j3yooI)h;2)4>X`cngKPc0|2}@;U)==!Uv5Mz3kc zx?0&7T#Q5+{%vNdb?xZH#^6>wyU2z5#^9^Kpa=Z>PESn}@@!s)W!F=49;g&u&Bnlj zBG9ir@J{7yFHi-deLFT>QuIQgZX;t5Fvv3cGsdUf&dTN+zfjOUCAyfjg7# zcwZK}3kA>t<4AMnz@q)YZS)cUctAY5xsP^T@-g!>`G#U!aEvF9{%3L|OBMxRnj_SH zo+m4PWK93zy{)BeGDKkluvwkrEsTz?MJLbwrI(%{{1RnNCg9SkJW4ZLFml9%fYnk+R-t{BNh9?KeB z3ej`+8tT3=Xzp*d#Xq}m^mw$pBYXm0MgGYeq)l!JE`V4%x|cJLRNq6eVkS4xh3HST zFl1j_FBrO@HP8n;NbRtQva(X_6x3K@_Ta6;J&GB8F8lA*E2O$I+}@| z9HJND9go>CR*GJ)B`*w)L&@(B&gi-PqQB&hA|Ag;S_da1F>**o_o}?k2D0|tIiw^a z%_<1*vf1gqQ$f{M^1_h8;>HW)R=kG-w)99v&2eYE zqgnOYgzLZQ3&Z9`&^#ywwzVJ&j7dHHdp5RLLFebVjoUNHHsd^TL4T5HN|r?tqsb71 zG4uNwu-B)sh^^9mxfH#znXEF66nAHmX462i&Pu|~Nuq5u8D#D&W{)Og&08vU-!iGf zg1&=I+FxNG(G6IvyltELCT{p_XQI1urr_-3ebkPHTj5BduVKu~y+>&%>S=I|V}}eBj;GOsXwe 
ziE1!-5);rz?pwE9NsYk_W#c17Z*L}_So$R2CTv6TzH{zd?XI41czP{z#!Z5T_Qi1D zF;pvyAybB`vUc!BL{!^QN^K#v!WpwMK*|NlE@SjN6X0`g${-&MEzDkp|e#mlw&Z>z(~0_Lw5})WZlUY+hcK6+=|Ric#6ZVGs9XjfCIRho9PRnHMor{*nD;Mbo@iD$9pq#|At#L$6-h)YOe-4k6;#ol{m4vTV#{8+cj+!fpyK=JVqCqfH1jq? z3}=w`>|orHx9P!Okq3&BD_feN8J3G6`r073Du6yDZOhW(7;=#+-dGrq`<6*l%dgce}n(d@B=eujIyJ)KcYq?K`G`&~(uo6evFYX4P| zm;msudNG`J6~n_Q@#FA-{$q5QQ>+VBV`B)(YrtGRg^sc>!0?qxXYDgtSeM5-@~pjr zN4Nx?wNKy?-oX_s>d%KN{6K*zGvmC)P?n>Xxgt3mLY-R$~7|e1i z3M{bM85hs%m*KjAutiiK!dUnv*D9T5mLs3@TcKnm4a@G;FwvLJj`TAHn(H5bUX^s# zFV1!T=JHDDCBBhz-tRRsyUBjMkckg}(=Vk{b&#XzlH}yuas(mY01l@^130F(LvCX%cIf z)L7WOj59Q^xIl78_ZJtYk^4sX6LzP7}Qy z(!s30{?`6UkDQGP}LDbF&=rM?9RZlY8+Pm_~ujtOGAhufIrON0`7$z_Y zAZ#`J@NWD#;=nsdvX#!LdoLYt!q0EszbSGat zszOr7W{SP#J$sl_R+q~?2p|E|qu&hYVcYL))VVGLWn#r!xzNn(9 zbZ}H2dzCgf&OL`gb()|$QAFIhED>DAYQ|(}hLjyw~|ISC_EES=h02jDSW;uFsO*K9m3D)7p!|AMd3-&yU6?;C%D#!U@mK84xKqHh=sf%a!$k1mSnOYZ z0XNF$)z->)=<=LXyl@F)<8Q;pzYKC%UatCVm}h)@N6Wp%Vb>11okMiv6|Np+TA86M z(<#h*P;r91R+c2rzDPbQOQ_t3!E_t`+z;(7b0yZw|J(=6lgEWdQo0*snAEUMmNTr# zeJg$8=g@Fdr!=_JIU{tT?>Dbh>P=*QWIMnzmw8Q)r(W*Ga+x!{O!~ZxrZhrB^ur7} zs#tQXDFK3eo^ww1hLGtKdgU_8r705wJHJAOr2dcAM zbcr!(l>0>6gDHv+hy$?9xfO{3HsZH@+or z-xED>&MR1)K7i7}k%+tPmTNdY^(`WKX|8g56qqN^XZ6OE+3a&(A-~-dpS3ZNnn72y z)EFO}u#d&#Y@20{q>Dgo6w+7)Dc_R3iKXI|W>PzGK;@X)l61wuW9|zPL5JdxcRN&m z`Rj>-&`AyLpdJC0I}^vS_q5S2un0Vb@Kc19Z~G4phhF`LRlw$edsJ8o>y{xTrz$1VC+S2#RAwEJYt z1n5$1kl&j`o|-gRoK->&Oj5-_Ir)ME2P5(nXOu%MEgeJ0Pb6I?r-?!d88kVQtSXPw z4m;mDUO!;Z#u~=KQo+@vFokwOR$I2TSvLbm<_*5hX246gf z(Muthk|TSjby6>TSqJw#$45~A9!7B0t~+M=c5#1Pqw|kA(_7 zR;QuD++>;o4o?j2*2zfUY$Bgb=@XA+Zz#@YmX|2y*AB^WzDTi&1MA5qB2BrsaP)xr=WW#wx^zP-Lt$AK_{;bl^1&(oR2RbLB5;n5nG#yqheQBE3^t^ zsw>+EkpcIbM9(xa`6%Lmry0?j<2axE@(Xh9-jd?CxNA2ODt_SP z*3%TX5yEiL)(T(N?Rz7*D2Ia89XAK~?#DgyiUcPNTv%*-6#PC}e&2ilZi4mxQgPH+ zvi1HEv20`J2%dslV14N6&pQT#e)TyCot80)GA;N6$nc`NZ{Bjn(@lu5fvf4{WfHyz zJydgUfkF@40HYR19}BQe>13lapuW$%4LD`4HLw%&mDRn;3)8yCWa;iFdKnYkI(56xvIvw-1Q;(xBqUnn~3zx`YpAp^k^vREa z;y-xq3Cw%cSL|LCiCvJ+6*jjAZrni%a0gwTRw#756!K3eItF=!>!igO_y{~q3g z=^lOz7eNbooavtmEOZi__d|v2MPXr3X0B1CW#}r!04$hh;eF=8bfuKiFnHa`8Ra0(ta@BA!ga=0i%a z`E2&iD-T4AaY zvWFg#-5|3cS{jb?UQbOklC8A+KoT@_uK0Kfd35G?;@d4`!o%4jOjqy2L&arD%U*T~;<#+V;|mD$@X zXWA{oLo+PGbATIw!UwQ#1Uv&cinM=>x6Fo-W=3Hoef0>bof}(u6*G)my=;@nrW@;7 zX5HZK))9;W<_``$j&@um!B}5PhFYs-N6smKcbb+85mT`i89Jw8F(PhE#X$&f>(Dj- zH5JooYJe}KlaIdI@IU5ajLbvso7b<$b|m>rqq~6mC!D@F^7<#->G7Qt&V41a)H^v6 zN;6bde>nsa%T9=0adZMNgU`U5he>2r^ib_>%2$UaX`h6{y2}k{YS}@C>c5*GO?%$v z*$tk{Pa|2vb=8rrRP`#EQFVX!? z2U3o6e`kV?HZmNWy<(KuvCJ`m@pL~+dl^6xZjNC#ZYDTAS!=XR?74Xm57d{F{2|xPnaFyHW)K7L+))dIN$Ho;* ztFg(z$g_ibTv}$A^w?6!1kqr0%7)wM-RRTuT4#uCc}R7=>nDeNco}tm$;Y7FV`Nf? 
[base85-encoded binary delta data omitted]
delta 27635
[base85-encoded binary delta data omitted]
diff --git a/pipenv/patched/notpip/_vendor/distlib/util.py b/pipenv/patched/notpip/_vendor/distlib/util.py
index e851146c0f..01324eae46 100644
--- a/pipenv/patched/notpip/_vendor/distlib/util.py
+++ b/pipenv/patched/notpip/_vendor/distlib/util.py
@@ -703,7 +703,7 @@ def __eq__(self, other):
 
 ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
                       \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
-                      \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+                      \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                       ''', re.VERBOSE)
 
 def get_export_entry(specification):
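The only functional change in the hunk above is `\w+` -> `[\w-]+` in the
`flags` group, so the first flag inside the optional bracketed suffix of an
export specification may now contain hyphens. A minimal standalone sketch of
the effect, using a hypothetical specification string (group names as in
distlib's `util.py`):

```python
import re

# Patched pattern: only the first flag in the [...] suffix accepts hyphens
# ([\w-]+); later comma-separated flags still match plain \w+.
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
                      \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                      ''', re.VERBOSE)

m = ENTRY_RE.search('gui = myapp.cli:main [gui-scripts]')
print(m.group('name'), m.group('callable'), m.group('flags'))
# gui myapp.cli:main gui-scripts
# Under the old \w+ pattern the bracketed group failed to participate in the
# match, so m.group('flags') came back as None and the flag was dropped.
```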
@@ -1438,7 +1438,8 @@ def connect(self):
                                         ca_certs=self.ca_certs)
         else:  # pragma: no cover
             context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
-            context.options |= ssl.OP_NO_SSLv2
+            if hasattr(ssl, 'OP_NO_SSLv2'):
+                context.options |= ssl.OP_NO_SSLv2
             if self.cert_file:
                 context.load_cert_chain(self.cert_file, self.key_file)
             kwargs = {}
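The second hunk makes the vendored HTTPS connection tolerate `ssl` modules
built without SSLv2 support, where the `OP_NO_SSLv2` constant does not exist
and the unconditional `|=` raised AttributeError. A minimal sketch of the
guarded pattern outside the vendored class:

```python
import ssl

# Only disable SSLv2 when this interpreter's ssl module actually exposes
# the constant; builds lacking SSLv2 simply skip the option.
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)  # legacy alias of PROTOCOL_TLS
if hasattr(ssl, 'OP_NO_SSLv2'):
    context.options |= ssl.OP_NO_SSLv2

print(bool(context.options & getattr(ssl, 'OP_NO_SSLv2', 0)))
```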
diff --git a/pipenv/patched/notpip/_vendor/distlib/w32.exe b/pipenv/patched/notpip/_vendor/distlib/w32.exe
index 4df77001a222c84ff3fef542618b3f45f4c1eb9e..e6439e9e45897365d5ac6a85a46864c158a225fd 100644
GIT binary patch
delta 17543
[base85-encoded binary delta data omitted]
diff --git a/pipenv/patched/notpip/_vendor/distlib/w64.exe b/pipenv/patched/notpip/_vendor/distlib/w64.exe
index 63ce483d1e462373fe16015c70ec52bbbd816c11..46139dbf9400b7bc0b64e6756ce17b4eb5fd7436 100644
GIT binary patch
delta 25576
[base85-encoded binary delta data omitted]
delta 25342
[base85-encoded binary delta data omitted]
z^kG7xvXr}|vZ3uweADl2TKjjyH{b@Ad}#1kh}aTWogtelKIko=#Xk00Lsub7|J500 z?$9ki4~r4H7ivwM)EXO1u3icS=KumHl|dcVg*2f-mL)%&N2+KKP4p4g;{kUPwY7-G zlIZtPyb@_!Z)HrJ4?v8Q7XC)IfA!aBQn5ZL5o0C{<@!_PsyiYnxL9KVoz?wqAcUo^ zgESVDo0EEirC74km#6~j?*N4jB{|zb<`>l_wnn0f$0lyZG*RN-dq_NiKMnq3ll5@; zZWM#<3Mn6gSuGv9$7V$Z^VXqEj0$Xf6s?d=VbnfRzY{&A^B@B!3~fu9%B4-3vxA+9 z>cY=X7ps#`40Ub1fliChd%}_Ta;9!}K zdB;`e5LQLvr;y@uoz*dWhT+9QfP{lomtruSI>c%_9_TRyW-4b1M(%!~BwuBkEp7+Z zmYhMQV2gw7la=dbs(3lo?i}uim7UtRk`AH?j6Ljh-*~=><0@3+Le&2f#~yYL$v+Ci zQ0h2X8Jl+^(RHX#xGmigYFiw%jS`J&VxE@hP=>9NYgib25~f2IN)(E{bb|HUXu*2a zQIJntoG=`NAl&e)GBKuckm+X=>{7+5G@GewTOC$Rxp1^eZ@t+jmA%^~A<>HEQIJ^W zLVeUr|4@CR;X*q!2KVIpIrUHkwbis;+YDv6R0s)I=l2n8`rc2NTXcumOl%{@gR^{% ziUm}G`x=@9TS$;2Y-KR1nzMgz{o8A)r}e12)S1nS_V*n}iMSftOzo9IPtEU+LO*-V zHb!^saLy6+SdT`d5HHA4R)g{2d|xB-*YBZ3+dxWuw4ePM9mWUy@Ad7vUgtgsvdLw3 zijG{A7L8yPFQ<$)v(Y4dk`&b4TXS%658xQ{O(J%xp2vI~(tRk}LxrjSNS~o;N z#9I+@&_qT^Y+&pQ?yn7}C1@u z786u}3!Ok0gz@1?0!}yqa~3_Z0ylMo!IgFI9u_P70SWSi6xBgCS!@#j5U*PQmZ2tt z`(sL`gy&!u(!wpV7m~}?q92%7v3sCj`@K}phmO!F+8UCuM?)E<#UHTp?s0st57^`G z-Fp@9p>-)X$)-i1s2D*uO#z9HxfjW~uGS|qcCgb@HV$Dyv{4g;*lOwM4{Sz{?miv9 zz(Vy=7b+<#G~su!*eh>&vh)IbA3?0g=*)%Hkt|+Kr4!{3*|h2l_H&PHzS9FXxTi3! z9j1~NoD-Dpg}<<>>;-GNI~7);+>0Fkl^)id?xmz_%lNAFGiJKE4Ecdph8L`pQ%Jyq zyZwMY>Di98jSJ-;Z(;-ECi6)T*seIUaT!KOu(?(V#juk7cC#PiMh`0c69*MkBuXf{ zX9=*r=BTrn1QGko{?BO$&PXnGY#a5)f>Ne~A$(Rt}&P0;1Gl1*=Y z&hq2ix%a0eyOU6WDhyBD=j^Na(Dnh?QaM<DvVQMJU~eo4XMPDBSJr8BNB!u z)x-ay*~tq*HYBClBF;NNLNuHL)UQCjV4YQ2m4F7^<0HOO*+YV919*w7c;`Kwpiq!*&{FI^>t7$neq>!lNFEJKZzsj+M|_Nf}n zQDYm_Se_bNsl>zrO>y+nQkAip7&iTmZvhEas9-Ai-x$Efj{6S0s>~(HzxDLrsiA35 zNXWF3I$mWxk~+G)-GrmjAGt#wu{lX$`8BoF(iWq+*<&#EG8V%xa9d$aGI3IVp^QoX zb}~q2{Dzrmk}ZNVrmDq}n?RM(+Ol*N4Fe#d3~Xm)04o@dWUMe(I#*0F;AsO}rentB z@IXH$a~1N-rmsInYO($Z4tK!kZQ0P|9_D$M)b+RsT-kKqZoBb~gmN7Tgq zO5%?VeC##p^NVbF-~LWJDv`~WxUG_X-?xjCRgEoCV(zgx4Zp#@gATjMrduB}zZbvY zFYIO2FDCF2yO~o;KYrBDEIp-d{*MD`+~BCl#ja%=Vkeye16zo}n&|v9M;?svNO)bQ zvHzX9?cYpQ&TTdEwC4tv_oxfkTp@NV_){~ z8IwcViuF1>v>IB}9*D_99Kh&;)hl>HvZ=#bAU~5;LF40?9@PHUy5_@Str2lJ3 zhvAD0yKIs-qaaLjCu`y=UdPyge17q5<|RxD>T-ixL>5VoazNY{Q|+n!c+PNXGg~9r z`1nCAB()RokqYo+S*d3J2Z0r(ZsGN(So?w9`3ZMe=D@D;QT15Ted+EQ&?vj)dSh`* zzo6{qwXc^xJ4=gP&qGiseha1_67>%xc4}ZKpLUl0HZU~!5=5*Kx1J%*`s7(|M5X>a z&`>QbIAIg(JSaGLXA8r76Rd+v)+bMMy#x_guu6T&Gd6QjJda!Lhl4us|J-Kh2c`H- z-$-g+#gHC7wwDnVNam?FFF?MgB8NHSno%~73lKX?Xp~q23jBMmzspEQ^x%%gIM6u z5pB{ikd3kyEbFBX!o^COBry`1r1b-6ibN`a zjskCMJeeF{{7b^1N{|60z4tG^t&&}SDKvkE$`X+LO#wzjh&m^YRC(!CD5Cw(W2}lMKw!M~dq!krx)4xpO@dFwy9&nRVbDgaY zXs=$nb%HG&=F6Wx!Age(^E*zk{lk*^wZE{(!=m{$H(AHwUHMhF*u>#M{Ndwl+3;9C z_5|BId;q`jEPFcqQ`b8`wHP*w*q#xO4ZqyPwZPV2hx2H)zz&RTZ{7utH7?Xx^cI>T zZAPrvw_%x73Ialc!1z&dFN{)!a$}`5P$|XchW7A1M4oWK4)82DNa_J@C0;XtPP<6$ zl?XZGrLSP2!e_}srm9USXy$b6o5?<2Jc?!05rfG)}6oWr^ecOus$Ev1wfgrG(EnO>Dw*qsnz`*7y!ipTow1 z-11tsY5dGy8G$$~RIhM=WJHqY{|#42NA#?|PLH`AEW}>GQ0t@xsdTy+8cSK#*IVS6HSeRtfT}0r!dJmS5^|V}n3JWix zLVURn>+Pkr>}KZi@Wn++g>G>4K$a|lchPoKZ!ENdKrPvfotl&rIzI*1OR`YFy0Zq} z!T6E27VievX5M#e*!amqvIg!$RZpHJ8&j@}6%Au#(_e4U8L7_=kS1QBQF1~Nl0_jg zK3sh=Ha2V1oLqbHM#GnA>#Inr?+7LydHG9&5W}E{KhOHbd1lB;PJDQtOle#z&wx-% ztv`bDVy_jQG%m-H!6hVKfbD9ut_GbZ-cA!Ar5=5zPqQUf{?5d#4nC`p5&5%~{G>w- zqha`-Vbxio-FmbXDh*P^FnUN#$i#3~pjniTyvFBZ8){hSlt{z(HPjkKBd6D}(Np5w z-T|lClOIyUHcs&g+F(Xyr_;o5aZM=sgO=Mv%W)y$$pTktWDPq%rEh%D>lo||w8=KJ z3g*rcFS2wV!Wbqaz>!?wvv_D|vEGYpWIgACcAY6UJif=$rgq^k-(yRs`tZB&v3I8? 
zbbJfvm{OL2l6aIvrbxDE8V9~$TTXfL9Lnx;u_^T)`)6v3haDwYD;*OqE?M5tN(}n* zE*m~A%6(4(>4&ACko7xlCU@D&X`T7%e0F477(eeW`*~WNpKTd(;z0)O5QC{2L9v&g z<+ESQyS8(2*CkeFlD&>%A>Z2)STunI$0bTV&2LgL4FIBvl# zLA;SB{+@dDAIyLE&3D+`!~VKzb|yQdM~{|F(jc`Qb*g)zVVLRwsk>IX;kn@Zr&--$ zUsj$QZLE2W=@W;$)1!bY$&JamIZiKuhsd(=EN@}AFh`g~C9&6%t>Irkf zXv1LQ8P3nJpd1?5xrN@&Vuf-$(ciwqeqY!(cq`gHQ1to<#=t(L$QAWD_^V!%rdP9} zizXW$!AQ__t?(o}vM7uX*vWogG|Ocf+73NwC%tr%jmaDDmwyeXFWm<5JnVK=OD0Pd zJDazK53FYMUk&Hq%wy|b?d~x&PnqRjwPP0CX1vl^0 zm^IAFVLY+udz!6XoWS4Q&(1IQ53N5%8gN=Ff+>_%9PvG(X~pd-^?JB7$q2ynFZCd_ zXOXYDdrvtID;!4Ac*seyYM%5{Y{+Y7enKA0d99yk8>}U~SORMt-a51mGtYL<2=Z6n zdK0q##;PnE4mPRW7>wqXpPBrcIlUM>Ww3A^l$MY{LWLz5!Z}3KB`#p0GDz5S>vJ*H zwRBTM`nF-TTiif>!qLC=$=09o1lctGXEtw%4?p=hTfU@oz6Tmj`gZ47i@ssGlDRH{+H?XX?+X%Ddj8xJ7f$>OMoTK%o&e9X(d!y>kS$sI5+7B` z&LQ@*g*7Y<<4>L1>%Qz;ozF1%pI}sl;dI30g_AeRraxTB>hs(4?`JTtg1(*3z+FNz zn-o7m(DdV&HtbQ?asz1N*@wj5iFn$Mr(~z4pP02E-2c`w6a#%IN0ELIFG%1l>6E;U z9WCgjo5_AJ@C!UfLf~a@LDklXeTZV(+Tm%&uZ=AH_0HY{zfqMT1}T!CDiv{zWxt-! zA0Nf)U%v)>g`Idq?0Es39yM?)nMAh4Uuv4~thfb=S5D}^JCAhPG(Lwa3@V~6vN*Br zw`{q!*lpHP2+OBq%ke(fxTCC1VK@HOd2D3iNIqsB+gTVN`Dm`97kKINtn}69+)*GX<Tv`Ic{Mn z^<*Tq%l01$H`tVfpjBttH*b#VwEZQjUMZ~$j6n->gs3^-?l?D_a%|FaIGttv-pcIs z`~S?b_KZ>oa`gQ#IhLGZ7v6dyf9qjvc(#dl>aKVhx5}kcg{(iZhCe}9;UH~Pl*qO5 zVY08);x3KeL56~CX(d;}p%`v`;>?cmXZN`nKz^bad4e%DgXxL}EbHSh$+@B^Q|bRqE3uY5w=jePthh>|U0> z(rmtTOPL;Hp$T#+%jaa&uf&iv7xUFERyUV7u$;w#${RO+fDuln&hk2Fs(LML&ZLA3$)c1d{*fluOZ(93*lTzx!1Eg<JK zEMzz|aR4nwf}$KJakMK5iz$KBWfe)=OoAeD7AD~lG)gvYCc%6Xn5PQ3w+MWs2o#V2 zr4LopYj7T7BS8TxmGsQ!^e#wG!(LFVFG2zhUMT7-R>fsgT(g`5l~b(GfDmqkqJHQR zE=GAfOWz+XJY^d|*%K-)lhYDzSa3w5?ldK{p%bNujk2ltI5xI4)o}D{l00YWCYyd8 z%XXJ~wcD<7JY~~yTzsh&bo0e4Uuc*|*C$#dO!x}6PK zC-DBCuyyMOxt)Qb(Y(UPY-jh^OBdSh-gEWaY%bk@D)zwPQ4I(@8J{%IG*`M!6@GCNy z|N8!XN(P&}zBfPQ1lznmkl*k&JF>nTzh^Xiu)a^XJ)@!Pjc_D4z8?R>56U5z@D@t9 z4Smk&0ET!1dw`=5dq$F8XA?GcWNS8f@R@J1k2b91-(1U*HpcNgN3g{k2LwjJL{J(> z&y?WY`=ZiLYuwW1?1zmV_{+zc)24u4+z8rrEGE3kpP2%`r+86$N9+}_@u=|M4T>p3 z{W$ywL$l9Fjrpj*51(DcHf^%x&n-l4;vjcvs!I4u<5j{e4OfXsX`o6(OUVl1BE_qW zBq>@Y1gV`$q)Wjnks+B>B2#iziEPPECE}#VR;A7y>Ap2zq4Fe2VTlD;Rnm2p#toa3 zYE-&}=wm9qj_5L#-c0mPmEKPDW|fAEnv>S5bQ#ghRl1z$B`RG-bbgK+fkDDaQ&hT^ z=nR#vBRWl`C8Cp5x{>H^D&0hMq)L;?ECs4Gp4M@ayGkqmI(wBS@1E532K8LbH&f)U z8c|$;bt>r>rP4J-JF4_%qMyF5 z^uAVytE1GYM(QZx7nPQXu2tzqqN`Q9iRf~b=CEx``&8PI=n`mE^&LP@W zrSpi!vmToJ0?_%=g90V8oDw9JE+P85O0OfjMx{3seN3gd6J4g#yNTYZ(q%+%CYs(} z!GmLR%uZ8Y2c~ylL?^rsz3~I(HJQbb!V7DI@B*%ZsjO_JkU{R?6sw$3DklS+Z&Xf- z%7I(c@TJO$DPZYa&G{*6%t=i*BfXc(d8DrLNN}Q6&MlP#N2#H$%DJd=;=l=1IY(7a z5;z_z=W~@Kfa9ogHmaO#aJ=(XmZ-9Fz;abNud19paCnt7UEwIZ4VZszR)%Ak%A{=v zOi5)Xs>~9Vc}-p#q?Rho^na$m1bc@~i{Gj?~0~5DJ@sBs`E!*}5&tE*ach<*Y zI=)9SE7|^HSl8EJN!G5D<$kb7iu-M*>sm*t5)AF+8nKQ&+}=KFV+Ou@DEg2ZfUDSg zI=O77ruT6tN;qMOgp1V@mX=ixKcyvu4g4g8e<_2_`6Slh{=A@#rR=j$7PZ;!s)Lzj zY`_O1r-ZDdcyXgVHLF~(tLkt3s?e#1z4+x z@5-I$bh=e+!mbeBAhOqYdGcHedv{lo&cIkEe(#}l3 z`(6Ile7147nU~YqH@g$~VRQCA-QA8i*jBdm(|;w4-WzThABY~3<=a|%X(gMqw@p+^ z5{~qg@Hk?^iukOLa^8_wX%uod;O3}#m11|Y9eW4I)Y{ZL_HLl&Ra%2#!!3VdLgFD@ z&lb%6FJ7g6JK$CFW=Z?(`MLnse_u#kYD-ROf?AlM`j*nDad0jLO8vD;f}e}-Si%0Y zucxc`XP6oo6?idj3HxbZr}jrz$Z|-W^gckF(sm%}Mt-yvnO9HJ#Gi4M|L9W|_hnX6 z@~7lJvad^inBw4AU@Y8A3wa+?>`pvG8!w2<%Sj1rdLKx}kMSBPy4IzNu14e93fVNg z54-lIA0OR^J^V77|LPMK`PGqL&wA4ZwvMdQAv)>+K4uc|#*>*!xr+0W-m0PJry2B= zE|Utw=L)i^e{W{q-;*!e&c^JA{hq*gpTto!;Cpm+Oo=)no5sW`bM0b>SGrOXp5>P{ILfBLo-FiGcmFYOHcziK7bKoa zl>IH^{em++*eizu`71ryszat;X-KNiLjska4okhAQk(rzWqgw?F3AN;Ie%ke3`C?G zFpw%a%rPfDC}KYz3gCxNW={?!$E-zy)r`|Y5gQyKDVP9Xc9vk=HHx?Bsx^haoy@&RiPAP9fS#1&buyxV}v^%^hQZxrbu 
zHzWt|TUM>m>gW@_w0jSmb0nw-Z!9z@C7qMrRK&Hsul_sl?B{tY>&uiiGAdS+hqvII z)K-(nyMX5;zyB)lyoZ$^>CCTb!y1ny@vd80%r_%@&S*npM{fn-xqsONYTWqn96_8N zo`=y$N*+IP0^Sf54W{|z-4v`Rbwa$M<{ftAn;HBMS*(3|9&hc+ww5RIH3jUa@>Krs zXx8ay1ixo8+jO)Cd;6#dpRJO;X9U`pJ;BX${EQAd{-e2U`OE=f8#AF%Da)ES_A+&2 zj|D^91=G!tT2AYz`eFvX*$V9qiAg=H*%?s9vlCRFS)uRv#i5`G!BDkn7X#T;^)oN%P?r_z&+NB$AA z9^z~%mrY&UvoksIj;$adn;IjT+bcniSLadWbR>&?CB(kqG|I?$nD@_OZN6>WZWcKv#FW2q=M2Q_WDVZZ<@qgoe+ML18g@rGi$mGUZ(ZE>;?!L* zL&(A#yPfuyu#dlu;Ac%{XTOa!A0(%QnDP$}3|BjP?Bp$lJufP5KJ%^!kMvMWlLoM)q zS(n7Ra=Cc9B`wzd;ASHY->OiKdsG{6A?&f766<=vnf8z{3>9vb#zCU%uaNa~mrWU= zti!QLb1T#=h~HX&(w_BRAdd%*;xu=>3mM@X%H|)73f_x`!=2{&(Vg%d~;ievGN@K+nN9Ph(_WMlh|o6TN$EJ$7u+(FAN3xq6sT_VQ@{VKMv zvFgXz*n{KK`5$uF%O~PHzdN^ii0e0E0O)yM5ilWf;|h%QQk=T@%2o5Nzq^7RJJF;6 z5i{-Zxp3z13q~ImXFe{wCMh0e;X$28OOfiH8I0$P*fTw)zvnd9CA~QZ9`ux?oj8ig zsgR|-NRKxci^`Y5=L<)}IjIC`DAtTV1fnyMDI%WYoe(e6C#m#mvut|Sh8?S%XmC=B zRa{K(&Sue7fw~$txT=l&KCGA&@tq_eq$<*s@&(n1SIfxdL$885NkK!{ zfvT83-64*LgUtq;-h?<*A(aL(MLPE(jj3J$Qxdn)EWi4ybZ{#XmK;exo3%gLJ<)l$ zBD4dmlXBq=O6;=TcPi0`xn}j#iCdp5Lhouqn^d8<=CSoBWB74f*twI@e8@beJJnuy zfkmC_7qELSx!fH|Jzu2?Q1WXH^HFf=Tqd4+iTB;jemoVF6alf6c2_y&j+%6 zghwV=i>>gy|rv+C-AsD7v)S|yvRe4(FJ(!J@>OS^{4Qpe_m&((xW z)0xMa?*6xsuD(%#{U|Zj_&I#>_-bG3bcjtl(}6!z%xq`+^0#W)`7>Sv&()%EI!zM= z@dqIaqSuTok7AJU;+hX8>Ki)uZKL5Z!v~~zo6H4O(PLU@TUW`sr6x(9#=4yCJMNpS zbR5fA;p^1GmIX9ila5aVH%_1HBCQ8i$r)%8@L&qB5#sR!Q)){@ZE50h)I%T6O+>6= zot3K=7(G~nhwL*X_Jh~ys=LA?5xe-!KOR3TTkjh|v>doBG`S7+Y zto^xQzI7o>J$J_6dn#40tgWq6WVwu}24_+KM;?o=84?Hn6x+qtEEabDmGo*8X`aQ?x-^nA7RACW zBoEpLn756x$pL1UwP~@jC_v#q#CuCnMT4R!%CjR_W74QBGzNRR@^#*1_RIO+US)9G z3)a7%<+@6#SPRg#kEXG17uxZ2Ozh1CIL=7zY;u`T3LC;S0$f ztz&4ao8Jk_Rb#k3tM3GT@&oB$E++c4l==f}e=&ydex1F1F}%|i*aQE&cY=&?wEwqv zf;L}dyDoO*zq-b*U+gVx!_80mtc+fNhTcn_FVf^Fi?*|18>LgesIHP8yrfjE7x0AA zSG^WIUxU)O6Ite^=-IWMls7gHVnC8ouz_O3yyt;2DAspIJ77);Fr}<_u|pYx6b4?} zcBR3>Lm!i8YegioLXKDtQjU0A`MVZ>@uqim(n#bq)4xv;2d7J&$FS9xJ>9Ke7oaZA z;-cer(JKYq<5st4zTVdh9$Z^zu(smp=E2vCb8GA_&K-|9iWKw(hL^&T`GSl zQ;||qHfORAu9)o8rP6y;B0Ke+v&-^jq#s_Vr1hiOjqgJE-kFTQ67APvi8_~f!ht8p z4=cQsk7p^h7Y1m)GQ5w$Gus}nP zc4`5#X4@4X0_iOv9BLZcs6w`&47>)9{#v-)s1*hJR@2hP*M>UfM?Y1C$&R64%@Z|FH*@;sXmMIg^fPO5oTkdJ z{IM#J9Z30QYH@67%FkVkdus9Bf2;C1e(9IbC20(p6w0r$Nfm&_qx=fAcz_mn*W%<` zpkMhTRUS64^2^iWZM69AhibZ6izjLE5G9V+Nzxyv0(qK1v=)C=i@R&_#ag`aPgVY@ z7MHZRti`ML{(URoxVQYTWL?VgI4+yyh{PB*uU zn7yDK8}Vp7YkcIl_vj-}JNF;W-2&t1|Ji@z+aD#K%{VfS{qi`okFFKZwTj`>TYEZk zo>N>o&u%W9=P+Z9q1+`$=#H1RQFBZX$BjhTeM4==SkQBoI9vS0zqiiWo^$TjigS)> zmD4)VnG2j^+cpEk^iFlh@hnIQz*+bLOeO=+;Ji&e+X?GsdXRr9Ag=3O+(`tV0C9 z)?TeRuPKh47v#KRp0}lS7pNRUi=3Y`=XcD7^Rqa!gr^3j4b=UJXpTFK5L!p2q3xh= z{44&yYeQ5F$8|?&F5AhV=M3F^ID^iMqicP0|2$)12O}ibsi<@1Iagh4PG@#y^Zx1Q z(JVKrH{SI~uR_T;K4rwgoKgk4I3A5+&P47 ztqrY7%`L8+-xR1i%Jv&(YV@pWT~6{eu`#(&B!t&B`M=?KT!r`-H%lk$bR9xlw{FF? 
zM!wcDv`08+HrF#0SG5HQvwkW)q0Zeu z(wV$2I9Sy*XU=(ktCl_E@0A=ExC&P2&C)%*t}T?m^m`3?J6#Wo_p;NqDgA=id6gF1 z=>j^_Tveejtxb-c2^*Rzrdh+EH^)~W?jnNeCsIo^P8n1;3@Qe;8wPg1sqWu%cm5p5H6cW6 z@_C^B&f^?v&i{XyyN@B$fG|sy({ZIMbUNQsvtH*{dQ+$KE}f{?jdpZ_-Z^u|`K3qn zy5L^a;%Ky3)m7-|2q*LuJ&y6Jqb=Vr!h!R#bi*bS%lYV{IhS;`mpQu94y|;3dQkon z+ys;@y)Vc4;@{Xb|5lP+(ugh5{G4|dyv={C4)OlK#`XU-uJiwYHm+e_{J$`+OXoZ3 zYWI(K)@{?XM*iSe?z+tmgG-od>-~R%FvTbBAL6U~$-Xo_Nas6n1dm%N+L*3knudag zNgBp!7_FgMLw61B`O-Z>x*_c^>D2OSG_2OJO2cvucbA3)>wG5tqD9wgh}}eK4*vU> zf_WO|Xqc&Cx`u*=DH_IUXx7lZ^mSBIx;qclhvuD*EE{eD)rgb;e@1Y`zwfihjVTd< z=p^7Mjm`n?)o6N>a2vrLG7>PP4UT!x^w0b*YV;*wRtOpm{s!Q;2vp`Vpe~f-?t12< zG7$X`C_w<$YxG?pACA-74>G_F2-E|@VG-(Rgb3-woV%b2Ck%(8LIE>@jR@NV zP$qEdD0Bw=Y+(NkJd5zbD}nG4aXARo>dj*?_qgwn_67nMAW)Ajzj;#g5Lkmiy&*h|o7D!$Q~?_o za@=*$goTT=5d}`oLmK$mz>RPn(Fkn@Ml8ns)BHt(_yvI!knrQzILrf-5qJZEDysvA zEKv)EJCEzF(S)wcR6QUpLZEWUIbMd*rKL;w*6Zpfwj8+Z4J=FW3*Nx`w^-FJmattR zmKhSFfx8e^fZh#^EmG-jzylh67)WocY=aEpZwQA$Hv-qKM2A2V&Rm6&13eqK34!*H zYM^Tgbq4#t8N>+2ab|y=f;P}JPrHE`YtaMn2{Yg2xQ?I+Zz0gANWlH2DqRL_LZG~a z-@XqI3gnLg$F9dzf*ub%xB;C6eHhp)e0=#A7IEpcLc6OptFE*%NCWt4XEF$ zR%Qm)Ae2MC4*2mlC@JW2VD5)V1DyvvfUQfxiye?~JN+0`Lz6 z3H%4Z?6Ybk2o2}Z0r2C1N#`+@(0oFl3y=ftdm$e~ga|E(a^P#

diff --git a/pipenv/patched/notpip/_vendor/distlib/wheel.py b/pipenv/patched/notpip/_vendor/distlib/wheel.py index 0c8efad9ae..bd179383ac 100644 --- a/pipenv/patched/notpip/_vendor/distlib/wheel.py +++ b/pipenv/patched/notpip/_vendor/distlib/wheel.py @@ -684,7 +684,7 @@ def _get_dylib_cache(self): if cache is None: # Use native string to avoid issues on 2.x: see Python #20140. base = os.path.join(get_cache_base(), str('dylib-cache'), - sys.version[:3]) + '%s.%s' % sys.version_info[:2]) cache = Cache(base) return cache diff --git a/pipenv/patched/notpip/_vendor/distro.pyi b/pipenv/patched/notpip/_vendor/distro.pyi new file mode 100644 index 0000000000..c7ea94b37b --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distro.pyi @@ -0,0 +1 @@ +from distro import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/html5lib.pyi b/pipenv/patched/notpip/_vendor/html5lib.pyi new file mode 100644 index 0000000000..9bc9af95e3 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/html5lib.pyi @@ -0,0 +1 @@ +from html5lib import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/idna.pyi b/pipenv/patched/notpip/_vendor/idna.pyi new file mode 100644 index 0000000000..7410d72fe7 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/idna.pyi @@ -0,0 +1 @@ +from idna import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/ipaddress.py b/pipenv/patched/notpip/_vendor/ipaddress.py index f2d0766842..3e6f9e499c 100644 --- a/pipenv/patched/notpip/_vendor/ipaddress.py +++ b/pipenv/patched/notpip/_vendor/ipaddress.py @@ -14,7 +14,7 @@ import itertools import struct -__version__ = '1.0.22' +__version__ = '1.0.23' # Compatibility functions _compat_int_types = (int,) @@ -1103,7 +1103,8 @@ def _is_subnet_of(a, b): try: # Always false if one is v4 and the other is v6.
if a._version != b._version: - raise TypeError("%s and %s are not of the same version" (a, b)) + raise TypeError( + "%s and %s are not of the same version" % (a, b)) return (b.network_address <= a.network_address and b.broadcast_address >= a.broadcast_address) except AttributeError: diff --git a/pipenv/patched/notpip/_vendor/ipaddress.pyi b/pipenv/patched/notpip/_vendor/ipaddress.pyi new file mode 100644 index 0000000000..eef994d945 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/ipaddress.pyi @@ -0,0 +1 @@ +from ipaddress import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/msgpack.pyi b/pipenv/patched/notpip/_vendor/msgpack.pyi new file mode 100644 index 0000000000..4e69b88679 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/msgpack.pyi @@ -0,0 +1 @@ +from msgpack import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/packaging.pyi b/pipenv/patched/notpip/_vendor/packaging.pyi new file mode 100644 index 0000000000..3458a3d637 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/packaging.pyi @@ -0,0 +1 @@ +from packaging import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE b/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE index 4947287f7b..f433b1a53f 100644 --- a/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE +++ b/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE @@ -174,4 +174,4 @@ incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - END OF TERMS AND CONDITIONS \ No newline at end of file + END OF TERMS AND CONDITIONS diff --git a/pipenv/patched/notpip/_vendor/packaging/__about__.py b/pipenv/patched/notpip/_vendor/packaging/__about__.py index dc95138d04..08d2c892b8 100644 --- a/pipenv/patched/notpip/_vendor/packaging/__about__.py +++ b/pipenv/patched/notpip/_vendor/packaging/__about__.py @@ -18,7 +18,7 @@ __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "19.2" +__version__ = "20.1" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" diff --git a/pipenv/patched/notpip/_vendor/packaging/_compat.py b/pipenv/patched/notpip/_vendor/packaging/_compat.py index 25da473c19..a145f7eeb3 100644 --- a/pipenv/patched/notpip/_vendor/packaging/_compat.py +++ b/pipenv/patched/notpip/_vendor/packaging/_compat.py @@ -5,6 +5,11 @@ import sys +from ._typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import Any, Dict, Tuple, Type + PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 @@ -18,14 +23,16 @@ def with_metaclass(meta, *bases): + # type: (Type[Any], Tuple[Type[Any], ...]) -> Any """ Create a base class with a metaclass. """ # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. 
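The `_compat.py` hunk above introduces the `MYPY_CHECK_RUNNING` guard for typing imports and annotates `with_metaclass()` via type comments. A minimal stdlib-only sketch of how the two conventions combine (the `Base` class is a hypothetical consumer, not part of this patch):

import abc

MYPY_CHECK_RUNNING = False
if MYPY_CHECK_RUNNING:  # pragma: no cover
    from typing import Any  # imported only while mypy analyzes the file

def with_metaclass(meta, *bases):
    # type: (type, *type) -> Any
    # Dummy metaclass for one level of instantiation that swaps itself
    # out for the real metaclass, as the comment above describes.
    class metaclass(meta):  # type: ignore
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, "temporary_class", (), {})

class Base(with_metaclass(abc.ABCMeta, object)):  # works on both Py2 and Py3
    @abc.abstractmethod
    def run(self):
        raise NotImplementedError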
- class metaclass(meta): + class metaclass(meta): # type: ignore def __new__(cls, name, this_bases, d): + # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any return meta(name, bases, d) return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/pipenv/patched/notpip/_vendor/packaging/_structures.py b/pipenv/patched/notpip/_vendor/packaging/_structures.py index 68dcca634d..800d5c5588 100644 --- a/pipenv/patched/notpip/_vendor/packaging/_structures.py +++ b/pipenv/patched/notpip/_vendor/packaging/_structures.py @@ -4,65 +4,83 @@ from __future__ import absolute_import, division, print_function -class Infinity(object): +class InfinityType(object): def __repr__(self): + # type: () -> str return "Infinity" def __hash__(self): + # type: () -> int return hash(repr(self)) def __lt__(self, other): + # type: (object) -> bool return False def __le__(self, other): + # type: (object) -> bool return False def __eq__(self, other): + # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): + # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): + # type: (object) -> bool return True def __ge__(self, other): + # type: (object) -> bool return True def __neg__(self): + # type: (object) -> NegativeInfinityType return NegativeInfinity -Infinity = Infinity() +Infinity = InfinityType() -class NegativeInfinity(object): +class NegativeInfinityType(object): def __repr__(self): + # type: () -> str return "-Infinity" def __hash__(self): + # type: () -> int return hash(repr(self)) def __lt__(self, other): + # type: (object) -> bool return True def __le__(self, other): + # type: (object) -> bool return True def __eq__(self, other): + # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): + # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): + # type: (object) -> bool return False def __ge__(self, other): + # type: (object) -> bool return False def __neg__(self): + # type: (object) -> InfinityType return Infinity -NegativeInfinity = NegativeInfinity() +NegativeInfinity = NegativeInfinityType() diff --git a/pipenv/patched/notpip/_vendor/packaging/_typing.py b/pipenv/patched/notpip/_vendor/packaging/_typing.py new file mode 100644 index 0000000000..6eb36e5a2a --- /dev/null +++ b/pipenv/patched/notpip/_vendor/packaging/_typing.py @@ -0,0 +1,39 @@ +"""For neatly implementing static typing in packaging. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. + +In packaging, all static-typing related imports should be guarded as follows: + + from pipenv.patched.notpip._vendor.packaging._typing import MYPY_CHECK_RUNNING + + if MYPY_CHECK_RUNNING: + from typing import ... 
+ +Ref: https://github.com/python/mypy/issues/3216 +""" + +MYPY_CHECK_RUNNING = False + +if MYPY_CHECK_RUNNING: # pragma: no cover + import typing + + cast = typing.cast +else: + # typing's cast() is needed at runtime, but we don't want to import typing. + # Thus, we use a dummy no-op version, which we tell mypy to ignore. + def cast(type_, value): # type: ignore + return value diff --git a/pipenv/patched/notpip/_vendor/packaging/markers.py b/pipenv/patched/notpip/_vendor/packaging/markers.py index aef30331c2..403909469d 100644 --- a/pipenv/patched/notpip/_vendor/packaging/markers.py +++ b/pipenv/patched/notpip/_vendor/packaging/markers.py @@ -13,8 +13,14 @@ from pipenv.patched.notpip._vendor.pyparsing import Literal as L # noqa from ._compat import string_types +from ._typing import MYPY_CHECK_RUNNING from .specifiers import Specifier, InvalidSpecifier +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import Any, Callable, Dict, List, Optional, Tuple, Union + + Operator = Callable[[str, str], bool] + __all__ = [ "InvalidMarker", @@ -46,30 +52,37 @@ class UndefinedEnvironmentName(ValueError): class Node(object): def __init__(self, value): + # type: (Any) -> None self.value = value def __str__(self): + # type: () -> str return str(self.value) def __repr__(self): + # type: () -> str return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) def serialize(self): + # type: () -> str raise NotImplementedError class Variable(Node): def serialize(self): + # type: () -> str return str(self) class Value(Node): def serialize(self): + # type: () -> str return '"{0}"'.format(self) class Op(Node): def serialize(self): + # type: () -> str return str(self) @@ -85,13 +98,13 @@ def serialize(self): | L("python_version") | L("sys_platform") | L("os_name") - | L("os.name") + | L("os.name") # PEP-345 | L("sys.platform") # PEP-345 | L("platform.version") # PEP-345 | L("platform.machine") # PEP-345 | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # PEP-345 - | L("extra") # undocumented setuptools legacy + | L("python_implementation") # undocumented setuptools legacy + | L("extra") # PEP-508 ) ALIASES = { "os.name": "os_name", @@ -131,6 +144,7 @@ def serialize(self): def _coerce_parse_result(results): + # type: (Union[ParseResults, List[Any]]) -> List[Any] if isinstance(results, ParseResults): return [_coerce_parse_result(i) for i in results] else: @@ -138,6 +152,8 @@ def _coerce_parse_result(results): def _format_marker(marker, first=True): + # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str + assert isinstance(marker, (list, tuple, string_types)) # Sometimes we have a structure like [[...]] which is a single item list @@ -172,10 +188,11 @@ def _format_marker(marker, first=True): "!=": operator.ne, ">=": operator.ge, ">": operator.gt, -} +} # type: Dict[str, Operator] def _eval_op(lhs, op, rhs): + # type: (str, Op, str) -> bool try: spec = Specifier("".join([op.serialize(), rhs])) except InvalidSpecifier: @@ -183,7 +200,7 @@ def _eval_op(lhs, op, rhs): else: return spec.contains(lhs) - oper = _operators.get(op.serialize()) + oper = _operators.get(op.serialize()) # type: Optional[Operator] if oper is None: raise UndefinedComparison( "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) @@ -192,13 +209,18 @@ def _eval_op(lhs, op, rhs): return oper(lhs, rhs) -_undefined = object() +class Undefined(object): + pass + + +_undefined = Undefined() def _get_env(environment, name): - value = environment.get(name, _undefined) + # type: (Dict[str, str], 
str) -> str + value = environment.get(name, _undefined) # type: Union[str, Undefined] - if value is _undefined: + if isinstance(value, Undefined): raise UndefinedEnvironmentName( "{0!r} does not exist in evaluation environment.".format(name) ) @@ -207,7 +229,8 @@ def _get_env(environment, name): def _evaluate_markers(markers, environment): - groups = [[]] + # type: (List[Any], Dict[str, str]) -> bool + groups = [[]] # type: List[List[bool]] for marker in markers: assert isinstance(marker, (list, tuple, string_types)) @@ -234,6 +257,7 @@ def _evaluate_markers(markers, environment): def format_full_version(info): + # type: (sys._version_info) -> str version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": @@ -242,9 +266,13 @@ def format_full_version(info): def default_environment(): + # type: () -> Dict[str, str] if hasattr(sys, "implementation"): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name + # Ignoring the `sys.implementation` reference for type checking due to + # mypy not liking that the attribute doesn't exist in Python 2.7 when + # run with the `--py27` flag. + iver = format_full_version(sys.implementation.version) # type: ignore + implementation_name = sys.implementation.name # type: ignore else: iver = "0" implementation_name = "" @@ -266,6 +294,7 @@ def default_environment(): class Marker(object): def __init__(self, marker): + # type: (str) -> None try: self._markers = _coerce_parse_result(MARKER.parseString(marker)) except ParseException as e: @@ -275,12 +304,15 @@ def __init__(self, marker): raise InvalidMarker(err_str) def __str__(self): + # type: () -> str return _format_marker(self._markers) def __repr__(self): + # type: () -> str return "".format(str(self)) def evaluate(self, environment=None): + # type: (Optional[Dict[str, str]]) -> bool """Evaluate a marker. Return the boolean from evaluating the given marker against the diff --git a/pipenv/patched/notpip/_vendor/packaging/py.typed b/pipenv/patched/notpip/_vendor/packaging/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/patched/notpip/_vendor/packaging/requirements.py b/pipenv/patched/notpip/_vendor/packaging/requirements.py index a3de76734c..0a9070845a 100644 --- a/pipenv/patched/notpip/_vendor/packaging/requirements.py +++ b/pipenv/patched/notpip/_vendor/packaging/requirements.py @@ -11,9 +11,13 @@ from pipenv.patched.notpip._vendor.pyparsing import Literal as L # noqa from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urlparse +from ._typing import MYPY_CHECK_RUNNING from .markers import MARKER_EXPR, Marker from .specifiers import LegacySpecifier, Specifier, SpecifierSet +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import List + class InvalidRequirement(ValueError): """ @@ -89,6 +93,7 @@ class Requirement(object): # TODO: Can we normalize the name and extra name? 
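For orientation, the `Requirement` and `Marker` classes being annotated in these hunks are typically used as below; a sketch against the stand-alone `packaging` distribution (the vendored copy lives under `pipenv.patched.notpip._vendor.packaging`):

from packaging.requirements import Requirement

req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version < "3"')
print(req.name)            # requests
print(req.extras)          # {'security'}
print(str(req.specifier))  # ==2.8.*,>=2.8.1
print(str(req.marker))     # python_version < "3"

# Marker.evaluate() overlays the given dict on default_environment().
print(req.marker.evaluate({"python_version": "2.7"}))  # True
print(req.marker.evaluate({"python_version": "3.8"}))  # False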
def __init__(self, requirement_string): + # type: (str) -> None try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: @@ -116,7 +121,8 @@ def __init__(self, requirement_string): self.marker = req.marker if req.marker else None def __str__(self): - parts = [self.name] + # type: () -> str + parts = [self.name] # type: List[str] if self.extras: parts.append("[{0}]".format(",".join(sorted(self.extras)))) @@ -135,4 +141,5 @@ def __str__(self): return "".join(parts) def __repr__(self): + # type: () -> str return "".format(str(self)) diff --git a/pipenv/patched/notpip/_vendor/packaging/specifiers.py b/pipenv/patched/notpip/_vendor/packaging/specifiers.py index 743576a080..94987486d4 100644 --- a/pipenv/patched/notpip/_vendor/packaging/specifiers.py +++ b/pipenv/patched/notpip/_vendor/packaging/specifiers.py @@ -9,8 +9,26 @@ import re from ._compat import string_types, with_metaclass +from ._typing import MYPY_CHECK_RUNNING from .version import Version, LegacyVersion, parse +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import ( + List, + Dict, + Union, + Iterable, + Iterator, + Optional, + Callable, + Tuple, + FrozenSet, + ) + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] + CallableOperator = Callable[[ParsedVersion, str], bool] + class InvalidSpecifier(ValueError): """ @@ -18,9 +36,10 @@ class InvalidSpecifier(ValueError): """ -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore @abc.abstractmethod def __str__(self): + # type: () -> str """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. @@ -28,12 +47,14 @@ def __str__(self): @abc.abstractmethod def __hash__(self): + # type: () -> int """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod def __eq__(self, other): + # type: (object) -> bool """ Returns a boolean representing whether or not the two Specifier like objects are equal. @@ -41,6 +62,7 @@ def __eq__(self, other): @abc.abstractmethod def __ne__(self, other): + # type: (object) -> bool """ Returns a boolean representing whether or not the two Specifier like objects are not equal. @@ -48,6 +70,7 @@ def __ne__(self, other): @abc.abstractproperty def prereleases(self): + # type: () -> Optional[bool] """ Returns whether or not pre-releases as a whole are allowed by this specifier. @@ -55,6 +78,7 @@ def prereleases(self): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None """ Sets whether or not pre-releases as a whole are allowed by this specifier. @@ -62,12 +86,14 @@ def prereleases(self, value): @abc.abstractmethod def contains(self, item, prereleases=None): + # type: (str, Optional[bool]) -> bool """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
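The `contains()`/`filter()` contract spelled out in the abstract methods above is easiest to see concretely; a sketch, again using the stand-alone `packaging` distribution:

from packaging.specifiers import Specifier

spec = Specifier(">=1.0")
print(spec.contains("1.3"))                       # True
print(spec.contains("2.0rc1"))                    # False: prereleases excluded by default
print(spec.contains("2.0rc1", prereleases=True))  # True once explicitly allowed
print(list(spec.filter(["1.3", "2.0rc1"])))       # ['1.3']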
@@ -76,19 +102,24 @@ def filter(self, iterable, prereleases=None): class _IndividualSpecifier(BaseSpecifier): - _operators = {} + _operators = {} # type: Dict[str, str] def __init__(self, spec="", prereleases=None): + # type: (str, Optional[bool]) -> None match = self._regex.search(spec) if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - self._spec = (match.group("operator").strip(), match.group("version").strip()) + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) # type: Tuple[str, str] # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases def __repr__(self): + # type: () -> str pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None @@ -98,15 +129,18 @@ def __repr__(self): return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) def __str__(self): + # type: () -> str return "{0}{1}".format(*self._spec) def __hash__(self): + # type: () -> int return hash(self._spec) def __eq__(self, other): + # type: (object) -> bool if isinstance(other, string_types): try: - other = self.__class__(other) + other = self.__class__(str(other)) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): @@ -115,9 +149,10 @@ def __eq__(self, other): return self._spec == other._spec def __ne__(self, other): + # type: (object) -> bool if isinstance(other, string_types): try: - other = self.__class__(other) + other = self.__class__(str(other)) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): @@ -126,52 +161,67 @@ def __ne__(self, other): return self._spec != other._spec def _get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) + # type: (str) -> CallableOperator + operator_callable = getattr( + self, "_compare_{0}".format(self._operators[op]) + ) # type: CallableOperator + return operator_callable def _coerce_version(self, version): + # type: (UnparsedVersion) -> ParsedVersion if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property def operator(self): + # type: () -> str return self._spec[0] @property def version(self): + # type: () -> str return self._spec[1] @property def prereleases(self): + # type: () -> Optional[bool] return self._prereleases @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value def __contains__(self, item): + # type: (str) -> bool return self.contains(item) def contains(self, item, prereleases=None): + # type: (UnparsedVersion, Optional[bool]) -> bool + # Determine if prereleases are to be allowed or not. if prereleases is None: prereleases = self.prereleases # Normalize item to a Version or LegacyVersion, this allows us to have # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) + normalized_item = self._coerce_version(item) # Determine if we should be supporting prereleases in this specifier # or not, if we do not support prereleases than we can short circuit # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: + if normalized_item.is_prerelease and not prereleases: return False # Actually do the comparison to determine if this item is contained # within this Specifier or not. 
- return self._get_operator(self.operator)(item, self.version) + operator_callable = self._get_operator(self.operator) # type: CallableOperator + return operator_callable(normalized_item, self.version) def filter(self, iterable, prereleases=None): + # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion] + yielded = False found_prereleases = [] @@ -230,32 +280,43 @@ class LegacySpecifier(_IndividualSpecifier): } def _coerce_version(self, version): + # type: (Union[ParsedVersion, str]) -> LegacyVersion if not isinstance(version, LegacyVersion): version = LegacyVersion(str(version)) return version def _compare_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective == self._coerce_version(spec) def _compare_not_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective != self._coerce_version(spec) def _compare_less_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective <= self._coerce_version(spec) def _compare_greater_than_equal(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective >= self._coerce_version(spec) def _compare_less_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective < self._coerce_version(spec) def _compare_greater_than(self, prospective, spec): + # type: (LegacyVersion, str) -> bool return prospective > self._coerce_version(spec) -def _require_version_compare(fn): +def _require_version_compare( + fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) +): + # type: (...) -> Callable[[Specifier, ParsedVersion, str], bool] @functools.wraps(fn) def wrapped(self, prospective, spec): + # type: (Specifier, ParsedVersion, str) -> bool if not isinstance(prospective, Version): return False return fn(self, prospective, spec) @@ -373,6 +434,8 @@ class Specifier(_IndividualSpecifier): @_require_version_compare def _compare_compatible(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to # implement this in terms of the other specifiers instead of @@ -400,56 +463,67 @@ def _compare_compatible(self, prospective, spec): @_require_version_compare def _compare_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool + # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. prospective = Version(prospective.public) # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* + split_spec = _version_split(spec[:-2]) # Remove the trailing .* # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. - prospective = _version_split(str(prospective)) + split_prospective = _version_split(str(prospective)) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. - prospective = prospective[: len(spec)] + shortened_prospective = split_prospective[: len(split_spec)] # Pad out our two sides with zeros so that they both equal the same # length. 
- spec, prospective = _pad_version(spec, prospective) + padded_spec, padded_prospective = _pad_version( + split_spec, shortened_prospective + ) + + return padded_prospective == padded_spec else: # Convert our spec string into a Version - spec = Version(spec) + spec_version = Version(spec) # If the specifier does not have a local segment, then we want to # act as if the prospective version also does not have a local # segment. - if not spec.local: + if not spec_version.local: prospective = Version(prospective.public) - return prospective == spec + return prospective == spec_version @_require_version_compare def _compare_not_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool return not self._compare_equal(prospective, spec) @_require_version_compare def _compare_less_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool return prospective <= Version(spec) @_require_version_compare def _compare_greater_than_equal(self, prospective, spec): + # type: (ParsedVersion, str) -> bool return prospective >= Version(spec) @_require_version_compare - def _compare_less_than(self, prospective, spec): + def _compare_less_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + # Convert our spec to a Version instance, since we'll want to work with # it as a version. - spec = Version(spec) + spec = Version(spec_str) # Check to see if the prospective version is less than the spec # version. If it's not we can short circuit and just return False now @@ -471,10 +545,12 @@ def _compare_less_than(self, prospective, spec): return True @_require_version_compare - def _compare_greater_than(self, prospective, spec): + def _compare_greater_than(self, prospective, spec_str): + # type: (ParsedVersion, str) -> bool + # Convert our spec to a Version instance, since we'll want to work with # it as a version. - spec = Version(spec) + spec = Version(spec_str) # Check to see if the prospective version is greater than the spec # version. If it's not we can short circuit and just return False now @@ -502,10 +578,13 @@ def _compare_greater_than(self, prospective, spec): return True def _compare_arbitrary(self, prospective, spec): + # type: (Version, str) -> bool return str(prospective).lower() == str(spec).lower() @property def prereleases(self): + # type: () -> bool + # If there is an explicit prereleases set for this, then we'll just # blindly use that. if self._prereleases is not None: @@ -530,6 +609,7 @@ def prereleases(self): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value @@ -537,7 +617,8 @@ def prereleases(self, value): def _version_split(version): - result = [] + # type: (str) -> List[str] + result = [] # type: List[str] for item in version.split("."): match = _prefix_regex.search(item) if match: @@ -548,6 +629,7 @@ def _version_split(version): def _pad_version(left, right): + # type: (List[str], List[str]) -> Tuple[List[str], List[str]] left_split, right_split = [], [] # Get the release segment of our versions @@ -567,14 +649,16 @@ def _pad_version(left, right): class SpecifierSet(BaseSpecifier): def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and + # type: (str, Optional[bool]) -> None + + # Split on , to break each individual specifier into it's own item, and # strip each item to remove leading/trailing whitespace. 
- specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] # Parsed each individual specifier, attempting first to make it a # Specifier and falling back to a LegacySpecifier. parsed = set() - for specifier in specifiers: + for specifier in split_specifiers: try: parsed.add(Specifier(specifier)) except InvalidSpecifier: @@ -588,6 +672,7 @@ def __init__(self, specifiers="", prereleases=None): self._prereleases = prereleases def __repr__(self): + # type: () -> str pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None @@ -597,12 +682,15 @@ def __repr__(self): return "".format(str(self), pre) def __str__(self): + # type: () -> str return ",".join(sorted(str(s) for s in self._specs)) def __hash__(self): + # type: () -> int return hash(self._specs) def __and__(self, other): + # type: (Union[SpecifierSet, str]) -> SpecifierSet if isinstance(other, string_types): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): @@ -626,9 +714,8 @@ def __and__(self, other): return specifier def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -636,9 +723,8 @@ def __eq__(self, other): return self._specs == other._specs def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): + # type: (object) -> bool + if isinstance(other, (string_types, _IndividualSpecifier)): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented @@ -646,13 +732,17 @@ def __ne__(self, other): return self._specs != other._specs def __len__(self): + # type: () -> int return len(self._specs) def __iter__(self): + # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] return iter(self._specs) @property def prereleases(self): + # type: () -> Optional[bool] + # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: @@ -670,12 +760,16 @@ def prereleases(self): @prereleases.setter def prereleases(self, value): + # type: (bool) -> None self._prereleases = value def __contains__(self, item): + # type: (Union[ParsedVersion, str]) -> bool return self.contains(item) def contains(self, item, prereleases=None): + # type: (Union[ParsedVersion, str], Optional[bool]) -> bool + # Ensure that our item is a Version or LegacyVersion instance. if not isinstance(item, (LegacyVersion, Version)): item = parse(item) @@ -701,7 +795,13 @@ def contains(self, item, prereleases=None): # will always return True, this is an explicit design decision. return all(s.contains(item, prereleases=prereleases) for s in self._specs) - def filter(self, iterable, prereleases=None): + def filter( + self, + iterable, # type: Iterable[Union[ParsedVersion, str]] + prereleases=None, # type: Optional[bool] + ): + # type: (...) -> Iterable[Union[ParsedVersion, str]] + # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. 
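Two behaviours from the hunks above are worth a concrete illustration: the compatible-release operator handled by `_compare_compatible()`, and the comma-splitting `SpecifierSet` constructor. A sketch with stand-alone `packaging`:

from packaging.specifiers import Specifier, SpecifierSet

spec = Specifier("~=2.2")    # equivalent to ">=2.2, ==2.*"
print(spec.contains("2.3"))  # True
print(spec.contains("3.0"))  # False: outside the ==2.* prefix

specs = SpecifierSet(">=1.0, <2.0, !=1.3")  # split on "," and AND-ed together
print("1.4" in specs)        # True  (__contains__ delegates to contains())
print("1.3" in specs)        # False
print(list(specs.filter(["0.9", "1.2", "1.3", "1.9"])))  # ['1.2', '1.9']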
@@ -719,8 +819,8 @@ def filter(self, iterable, prereleases=None): # which will filter out any pre-releases, unless there are no final # releases, and which will filter out LegacyVersion in general. else: - filtered = [] - found_prereleases = [] + filtered = [] # type: List[Union[ParsedVersion, str]] + found_prereleases = [] # type: List[Union[ParsedVersion, str]] for item in iterable: # Ensure that we some kind of Version class for this item. diff --git a/pipenv/patched/notpip/_vendor/packaging/tags.py b/pipenv/patched/notpip/_vendor/packaging/tags.py index ec9942f0f6..60a69d8f94 100644 --- a/pipenv/patched/notpip/_vendor/packaging/tags.py +++ b/pipenv/patched/notpip/_vendor/packaging/tags.py @@ -13,12 +13,37 @@ EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] del imp +import logging +import os import platform import re +import struct import sys import sysconfig import warnings +from ._typing import MYPY_CHECK_RUNNING, cast + +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import ( + Dict, + FrozenSet, + IO, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + ) + + PythonVersion = Sequence[int] + MacVersion = Tuple[int, int] + GlibcVersion = Tuple[int, int] + + +logger = logging.getLogger(__name__) INTERPRETER_SHORT_NAMES = { "python": "py", # Generic. @@ -26,7 +51,7 @@ "pypy": "pp", "ironpython": "ip", "jython": "jy", -} +} # type: Dict[str, str] _32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 @@ -37,23 +62,31 @@ class Tag(object): __slots__ = ["_interpreter", "_abi", "_platform"] def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None self._interpreter = interpreter.lower() self._abi = abi.lower() self._platform = platform.lower() @property def interpreter(self): + # type: () -> str return self._interpreter @property def abi(self): + # type: () -> str return self._abi @property def platform(self): + # type: () -> str return self._platform def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Tag): + return NotImplemented + return ( (self.platform == other.platform) and (self.abi == other.abi) @@ -61,16 +94,20 @@ def __eq__(self, other): ) def __hash__(self): + # type: () -> int return hash((self._interpreter, self._abi, self._platform)) def __str__(self): + # type: () -> str return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) def __repr__(self): + # type: () -> str return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) def parse_tag(tag): + # type: (str) -> FrozenSet[Tag] tags = set() interpreters, abis, platforms = tag.split("-") for interpreter in interpreters.split("."): @@ -80,20 +117,54 @@ def parse_tag(tag): return frozenset(tags) +def _warn_keyword_parameter(func_name, kwargs): + # type: (str, Dict[str, bool]) -> bool + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. 
+ """ + if not kwargs: + return False + elif len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError( + "{}() got an unexpected keyword argument {!r}".format(func_name, arg) + ) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + # type: (str, bool) -> Union[int, str, None] + value = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + def _normalize_string(string): + # type: (str) -> str return string.replace(".", "_").replace("-", "_") -def _cpython_interpreter(py_version): - # TODO: Is using py_version_nodot for interpreter version critical? - return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1]) +def _abi3_applies(python_version): + # type: (PythonVersion) -> bool + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) -def _cpython_abis(py_version): +def _cpython_abis(py_version, warn=False): + # type: (PythonVersion, bool) -> List[str] + py_version = tuple(py_version) # To allow for version comparison. abis = [] version = "{}{}".format(*py_version[:2]) debug = pymalloc = ucs4 = "" - with_debug = sysconfig.get_config_var("Py_DEBUG") + with_debug = _get_config_var("Py_DEBUG", warn) has_refcount = hasattr(sys, "gettotalrefcount") # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled # extension modules is the best option. @@ -102,11 +173,11 @@ def _cpython_abis(py_version): if with_debug or (with_debug is None and (has_refcount or has_ext)): debug = "d" if py_version < (3, 8): - with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC") + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) if with_pymalloc or with_pymalloc is None: pymalloc = "m" if py_version < (3, 3): - unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE") + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) if unicode_size == 4 or ( unicode_size is None and sys.maxunicode == 0x10FFFF ): @@ -124,86 +195,151 @@ def _cpython_abis(py_version): return abis -def _cpython_tags(py_version, interpreter, abis, platforms): +def cpython_tags( + python_version=None, # type: Optional[PythonVersion] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[:2] + + if len(python_version) < 2: + interpreter = "cp{}".format(python_version[0]) + else: + interpreter = "cp{}{}".format(*python_version[:2]) + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. 
+ for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) for abi in abis: for platform_ in platforms: yield Tag(interpreter, abi, platform_) - for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): - yield tag + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): yield tag - # PEP 384 was first implemented in Python 3.2. - for minor_version in range(py_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{major}{minor}".format( - major=py_version[0], minor=minor_version - ) - yield Tag(interpreter, "abi3", platform_) - -def _pypy_interpreter(): - return "pp{py_major}{pypy_major}{pypy_minor}".format( - py_major=sys.version_info[0], - pypy_major=sys.pypy_version_info.major, - pypy_minor=sys.pypy_version_info.minor, - ) + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{major}{minor}".format( + major=python_version[0], minor=minor_version + ) + yield Tag(interpreter, "abi3", platform_) def _generic_abi(): + # type: () -> Iterator[str] abi = sysconfig.get_config_var("SOABI") if abi: - return _normalize_string(abi) - else: - return "none" + yield _normalize_string(abi) -def _pypy_tags(py_version, interpreter, abi, platforms): - for tag in (Tag(interpreter, abi, platform) for platform in platforms): - yield tag - for tag in (Tag(interpreter, "none", platform) for platform in platforms): - yield tag +def generic_tags( + interpreter=None, # type: Optional[str] + abis=None, # type: Optional[Iterable[str]] + platforms=None, # type: Optional[Iterable[str]] + **kwargs # type: bool +): + # type: (...) -> Iterator[Tag] + """ + Yields the tags for a generic interpreter. + The tags consist of: + - -- -def _generic_tags(interpreter, py_version, abi, platforms): - for tag in (Tag(interpreter, abi, platform) for platform in platforms): - yield tag - if abi != "none": - tags = (Tag(interpreter, "none", platform_) for platform_ in platforms) - for tag in tags: - yield tag + The "none" ABI will be added if it was not explicitly provided. + """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) def _py_interpreter_range(py_version): + # type: (PythonVersion) -> Iterator[str] """ - Yield Python versions in descending order. + Yields Python versions in descending order. After the latest version, the major-only version will be yielded, and then - all following versions up to 'end'. + all previous versions of that major version. 
""" - yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1]) + if len(py_version) > 1: + yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1]) yield "py{major}".format(major=py_version[0]) - for minor in range(py_version[1] - 1, -1, -1): - yield "py{major}{minor}".format(major=py_version[0], minor=minor) + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield "py{major}{minor}".format(major=py_version[0], minor=minor) -def _independent_tags(interpreter, py_version, platforms): +def compatible_tags( + python_version=None, # type: Optional[PythonVersion] + interpreter=None, # type: Optional[str] + platforms=None, # type: Optional[Iterable[str]] +): + # type: (...) -> Iterator[Tag] """ - Return the sequence of tags that are consistent across implementations. + Yields the sequence of tags that are compatible with a specific version of Python. The tags consist of: - py*-none- - - -none-any + - -none-any # ... if `interpreter` is provided. - py*-none-any """ - for version in _py_interpreter_range(py_version): + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or _platform_tags()) + for version in _py_interpreter_range(python_version): for platform_ in platforms: yield Tag(version, "none", platform_) - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(py_version): + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): yield Tag(version, "none", "any") def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + # type: (str, bool) -> str if not is_32bit: return arch @@ -214,6 +350,7 @@ def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): def _mac_binary_formats(version, cpu_arch): + # type: (MacVersion, str) -> List[str] formats = [cpu_arch] if cpu_arch == "x86_64": if version < (10, 4): @@ -240,32 +377,42 @@ def _mac_binary_formats(version, cpu_arch): return formats -def _mac_platforms(version=None, arch=None): - version_str, _, cpu_arch = platform.mac_ver() +def mac_platforms(version=None, arch=None): + # type: (Optional[MacVersion], Optional[str]) -> Iterator[str] + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() # type: ignore if version is None: - version = tuple(map(int, version_str.split(".")[:2])) + version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version if arch is None: arch = _mac_arch(cpu_arch) - platforms = [] + else: + arch = arch for minor_version in range(version[1], -1, -1): compat_version = version[0], minor_version binary_formats = _mac_binary_formats(compat_version, arch) for binary_format in binary_formats: - platforms.append( - "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) + yield "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, ) - return platforms # From PEP 513. def _is_manylinux_compatible(name, glibc_version): + # type: (str, GlibcVersion) -> bool # Check for presence of _manylinux module. 
try: - import _manylinux + import _manylinux # noqa return bool(getattr(_manylinux, name + "_compatible")) except (ImportError, AttributeError): @@ -276,14 +423,50 @@ def _is_manylinux_compatible(name, glibc_version): def _glibc_version_string(): + # type: () -> Optional[str] # Returns glibc version string, or None if not using glibc. - import ctypes + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _glibc_version_string_confstr(): + # type: () -> Optional[str] + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". + version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821 + "CS_GNU_LIBC_VERSION" + ) + assert version_string is not None + _, version = version_string.split() # type: Tuple[str, str] + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes(): + # type: () -> Optional[str] + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return None # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen # manpage says, "If filename is NULL, then the returned handle is for the # main program". This way we can let the linker do the work to figure out # which libc our process is actually using. - process_namespace = ctypes.CDLL(None) + # + # Note: typeshed is wrong here so we are ignoring this line. + process_namespace = ctypes.CDLL(None) # type: ignore try: gnu_get_libc_version = process_namespace.gnu_get_libc_version except AttributeError: @@ -293,7 +476,7 @@ def _glibc_version_string(): # Call gnu_get_libc_version, which returns a string like "2.5" gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() + version_str = gnu_get_libc_version() # type: str # py2 / py3 compatibility: if not isinstance(version_str, str): version_str = version_str.decode("ascii") @@ -303,6 +486,7 @@ def _glibc_version_string(): # Separated out from have_compatible_glibc for easier unit testing. def _check_glibc_version(version_str, required_major, minimum_minor): + # type: (str, int, int) -> bool # Parse string and check against requested version. # # We use a regexp instead of str.split because we want to discard any @@ -324,81 +508,223 @@ def _check_glibc_version(version_str, required_major, minimum_minor): def _have_compatible_glibc(required_major, minimum_minor): + # type: (int, int) -> bool version_str = _glibc_version_string() if version_str is None: return False return _check_glibc_version(version_str, required_major, minimum_minor) +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. This only applies on Linux, which uses the ELF format. +class _ELFFileHeader(object): + # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header + class _InvalidELFFileHeader(ValueError): + """ + An invalid ELF file header was found. 
+ """ + + ELF_MAGIC_NUMBER = 0x7F454C46 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 2 + EM_386 = 3 + EM_S390 = 22 + EM_ARM = 40 + EM_X86_64 = 62 + EF_ARM_ABIMASK = 0xFF000000 + EF_ARM_ABI_VER5 = 0x05000000 + EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + def __init__(self, file): + # type: (IO[bytes]) -> None + def unpack(fmt): + # type: (str) -> int + try: + result, = struct.unpack( + fmt, file.read(struct.calcsize(fmt)) + ) # type: (int, ) + except struct.error: + raise _ELFFileHeader._InvalidELFFileHeader() + return result + + self.e_ident_magic = unpack(">I") + if self.e_ident_magic != self.ELF_MAGIC_NUMBER: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_class = unpack("B") + if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_data = unpack("B") + if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: + raise _ELFFileHeader._InvalidELFFileHeader() + self.e_ident_version = unpack("B") + self.e_ident_osabi = unpack("B") + self.e_ident_abiversion = unpack("B") + self.e_ident_pad = file.read(7) + format_h = "H" + format_i = "I" + format_q = "Q" + format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q + self.e_type = unpack(format_h) + self.e_machine = unpack(format_h) + self.e_version = unpack(format_i) + self.e_entry = unpack(format_p) + self.e_phoff = unpack(format_p) + self.e_shoff = unpack(format_p) + self.e_flags = unpack(format_i) + self.e_ehsize = unpack(format_h) + self.e_phentsize = unpack(format_h) + self.e_phnum = unpack(format_h) + self.e_shentsize = unpack(format_h) + self.e_shnum = unpack(format_h) + self.e_shstrndx = unpack(format_h) + + +def _get_elf_header(): + # type: () -> Optional[_ELFFileHeader] + try: + with open(sys.executable, "rb") as f: + elf_header = _ELFFileHeader(f) + except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): + return None + return elf_header + + +def _is_linux_armhf(): + # type: () -> bool + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_ARM + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABIMASK + ) == elf_header.EF_ARM_ABI_VER5 + result &= ( + elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD + ) == elf_header.EF_ARM_ABI_FLOAT_HARD + return result + + +def _is_linux_i686(): + # type: () -> bool + elf_header = _get_elf_header() + if elf_header is None: + return False + result = elf_header.e_ident_class == elf_header.ELFCLASS32 + result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB + result &= elf_header.e_machine == elf_header.EM_386 + return result + + +def _have_compatible_manylinux_abi(arch): + # type: (str) -> bool + if arch == "armv7l": + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() + return True + + def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + # type: (bool) -> Iterator[str] linux = _normalize_string(distutils.util.get_platform()) if linux == "linux_x86_64" and is_32bit: linux = "linux_i686" - manylinux_support = ( - ("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599) - ("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571) - ("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513) - ) 
+ manylinux_support = [] + _, arch = linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): + if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}: + manylinux_support.append( + ("manylinux2014", (2, 17)) + ) # CentOS 7 w/ glibc 2.17 (PEP 599) + if arch in {"x86_64", "i686"}: + manylinux_support.append( + ("manylinux2010", (2, 12)) + ) # CentOS 6 w/ glibc 2.12 (PEP 571) + manylinux_support.append( + ("manylinux1", (2, 5)) + ) # CentOS 5 w/ glibc 2.5 (PEP 513) manylinux_support_iter = iter(manylinux_support) for name, glibc_version in manylinux_support_iter: if _is_manylinux_compatible(name, glibc_version): - platforms = [linux.replace("linux", name)] + yield linux.replace("linux", name) break - else: - platforms = [] # Support for a later manylinux implies support for an earlier version. - platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter] - platforms.append(linux) - return platforms + for name, _ in manylinux_support_iter: + yield linux.replace("linux", name) + yield linux def _generic_platforms(): - platform = _normalize_string(distutils.util.get_platform()) - return [platform] + # type: () -> Iterator[str] + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + # type: () -> Iterator[str] + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() -def _interpreter_name(): - name = platform.python_implementation().lower() +def interpreter_name(): + # type: () -> str + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name # type: ignore + except AttributeError: # pragma: no cover + # Python 2.7 compatibility. + name = platform.python_implementation().lower() return INTERPRETER_SHORT_NAMES.get(name) or name -def _generic_interpreter(name, py_version): - version = sysconfig.get_config_var("py_version_nodot") - if not version: - version = "".join(map(str, py_version[:2])) - return "{name}{version}".format(name=name, version=version) +def interpreter_version(**kwargs): + # type: (bool) -> str + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = "".join(map(str, sys.version_info[:2])) + return version -def sys_tags(): +def sys_tags(**kwargs): + # type: (bool) -> Iterator[Tag] """ Returns the sequence of tag triples for the running interpreter. The order of the sequence corresponds to priority order for the interpreter, from most to least important. 
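
The manylinux fallback chain is easiest to see from the generator's output.
On an x86_64 box with glibc >= 2.17 and no _manylinux override module, the
(private) helper is expected to yield, in order:

    from pipenv.patched.notpip._vendor.packaging.tags import _linux_platforms

    # First the newest spec the system satisfies, then the older specs it
    # implies, then the bare linux tag as the last resort.
    print(list(_linux_platforms()))
    # ['manylinux2014_x86_64', 'manylinux2010_x86_64',
    #  'manylinux1_x86_64', 'linux_x86_64']   (system dependent)
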
""" - py_version = sys.version_info[:2] - interpreter_name = _interpreter_name() - if platform.system() == "Darwin": - platforms = _mac_platforms() - elif platform.system() == "Linux": - platforms = _linux_platforms() - else: - platforms = _generic_platforms() + warn = _warn_keyword_parameter("sys_tags", kwargs) - if interpreter_name == "cp": - interpreter = _cpython_interpreter(py_version) - abis = _cpython_abis(py_version) - for tag in _cpython_tags(py_version, interpreter, abis, platforms): - yield tag - elif interpreter_name == "pp": - interpreter = _pypy_interpreter() - abi = _generic_abi() - for tag in _pypy_tags(py_version, interpreter, abi, platforms): + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): yield tag else: - interpreter = _generic_interpreter(interpreter_name, py_version) - abi = _generic_abi() - for tag in _generic_tags(interpreter, py_version, abi, platforms): + for tag in generic_tags(): yield tag - for tag in _independent_tags(interpreter, py_version, platforms): + + for tag in compatible_tags(): yield tag diff --git a/pipenv/patched/notpip/_vendor/packaging/utils.py b/pipenv/patched/notpip/_vendor/packaging/utils.py index 8841878693..44f1bf9873 100644 --- a/pipenv/patched/notpip/_vendor/packaging/utils.py +++ b/pipenv/patched/notpip/_vendor/packaging/utils.py @@ -5,28 +5,33 @@ import re +from ._typing import MYPY_CHECK_RUNNING from .version import InvalidVersion, Version +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import Union _canonicalize_regex = re.compile(r"[-_.]+") def canonicalize_name(name): + # type: (str) -> str # This is taken from PEP 503. return _canonicalize_regex.sub("-", name).lower() -def canonicalize_version(version): +def canonicalize_version(_version): + # type: (str) -> Union[Version, str] """ - This is very similar to Version.__str__, but has one subtle differences + This is very similar to Version.__str__, but has one subtle difference with the way it handles the release segment. 
""" try: - version = Version(version) + version = Version(_version) except InvalidVersion: # Legacy versions cannot be normalized - return version + return _version parts = [] diff --git a/pipenv/patched/notpip/_vendor/packaging/version.py b/pipenv/patched/notpip/_vendor/packaging/version.py index 95157a1f78..f39a2a12a1 100644 --- a/pipenv/patched/notpip/_vendor/packaging/version.py +++ b/pipenv/patched/notpip/_vendor/packaging/version.py @@ -7,8 +7,35 @@ import itertools import re -from ._structures import Infinity - +from ._structures import Infinity, NegativeInfinity +from ._typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: # pragma: no cover + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + + from ._structures import InfinityType, NegativeInfinityType + + InfiniteTypes = Union[InfinityType, NegativeInfinityType] + PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] + SubLocalType = Union[InfiniteTypes, int, str] + LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], + ] + CmpKey = Tuple[ + int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType + ] + LegacyCmpKey = Tuple[int, Tuple[str, ...]] + VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool + ] __all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] @@ -19,6 +46,7 @@ def parse(version): + # type: (str) -> Union[LegacyVersion, Version] """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is @@ -37,28 +65,38 @@ class InvalidVersion(ValueError): class _BaseVersion(object): + _key = None # type: Union[CmpKey, LegacyCmpKey] + def __hash__(self): + # type: () -> int return hash(self._key) def __lt__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s < o) def __le__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s <= o) def __eq__(self, other): + # type: (object) -> bool return self._compare(other, lambda s, o: s == o) def __ge__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s >= o) def __gt__(self, other): + # type: (_BaseVersion) -> bool return self._compare(other, lambda s, o: s > o) def __ne__(self, other): + # type: (object) -> bool return self._compare(other, lambda s, o: s != o) def _compare(self, other, method): + # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] if not isinstance(other, _BaseVersion): return NotImplemented @@ -67,57 +105,71 @@ def _compare(self, other, method): class LegacyVersion(_BaseVersion): def __init__(self, version): + # type: (str) -> None self._version = str(version) self._key = _legacy_cmpkey(self._version) def __str__(self): + # type: () -> str return self._version def __repr__(self): + # type: () -> str return "".format(repr(str(self))) @property def public(self): + # type: () -> str return self._version @property def base_version(self): + # type: () -> str return self._version @property def epoch(self): + # type: () -> int return -1 @property def release(self): + # type: () -> None return None @property def pre(self): + # type: () -> None return None @property def post(self): + # type: () -> None return None @property def dev(self): + # type: () -> None return None @property def local(self): + # type: () 
-> None return None @property def is_prerelease(self): + # type: () -> bool return False @property def is_postrelease(self): + # type: () -> bool return False @property def is_devrelease(self): + # type: () -> bool return False @@ -133,6 +185,7 @@ def is_devrelease(self): def _parse_version_parts(s): + # type: (str) -> Iterator[str] for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) @@ -150,6 +203,8 @@ def _parse_version_parts(s): def _legacy_cmpkey(version): + # type: (str) -> LegacyCmpKey + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, @@ -158,7 +213,7 @@ def _legacy_cmpkey(version): # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. - parts = [] + parts = [] # type: List[str] for part in _parse_version_parts(version.lower()): if part.startswith("*"): # remove "-" before a prerelease tag @@ -171,9 +226,8 @@ def _legacy_cmpkey(version): parts.pop() parts.append(part) - parts = tuple(parts) - return epoch, parts + return epoch, tuple(parts) # Deliberately not anchored to the start and end of the string, to make it @@ -215,6 +269,8 @@ class Version(_BaseVersion): _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) def __init__(self, version): + # type: (str) -> None + # Validate the version and parse it into pieces match = self._regex.search(version) if not match: @@ -243,9 +299,11 @@ def __init__(self, version): ) def __repr__(self): + # type: () -> str return "".format(repr(str(self))) def __str__(self): + # type: () -> str parts = [] # Epoch @@ -275,26 +333,35 @@ def __str__(self): @property def epoch(self): - return self._version.epoch + # type: () -> int + _epoch = self._version.epoch # type: int + return _epoch @property def release(self): - return self._version.release + # type: () -> Tuple[int, ...] + _release = self._version.release # type: Tuple[int, ...] 
+ return _release @property def pre(self): - return self._version.pre + # type: () -> Optional[Tuple[str, int]] + _pre = self._version.pre # type: Optional[Tuple[str, int]] + return _pre @property def post(self): + # type: () -> Optional[Tuple[str, int]] return self._version.post[1] if self._version.post else None @property def dev(self): + # type: () -> Optional[Tuple[str, int]] return self._version.dev[1] if self._version.dev else None @property def local(self): + # type: () -> Optional[str] if self._version.local: return ".".join(str(x) for x in self._version.local) else: @@ -302,10 +369,12 @@ def local(self): @property def public(self): + # type: () -> str return str(self).split("+", 1)[0] @property def base_version(self): + # type: () -> str parts = [] # Epoch @@ -319,18 +388,41 @@ def base_version(self): @property def is_prerelease(self): + # type: () -> bool return self.dev is not None or self.pre is not None @property def is_postrelease(self): + # type: () -> bool return self.post is not None @property def is_devrelease(self): + # type: () -> bool return self.dev is not None + @property + def major(self): + # type: () -> int + return self.release[0] if len(self.release) >= 1 else 0 + + @property + def minor(self): + # type: () -> int + return self.release[1] if len(self.release) >= 2 else 0 + + @property + def micro(self): + # type: () -> int + return self.release[2] if len(self.release) >= 3 else 0 + + +def _parse_letter_version( + letter, # type: str + number, # type: Union[str, bytes, SupportsInt] +): + # type: (...) -> Optional[Tuple[str, int]] -def _parse_letter_version(letter, number): if letter: # We consider there to be an implicit 0 in a pre-release if there is # not a numeral associated with it. @@ -360,11 +452,14 @@ def _parse_letter_version(letter, number): return letter, int(number) + return None + _local_version_separators = re.compile(r"[\._-]") def _parse_local_version(local): + # type: (str) -> Optional[LocalType] """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). """ @@ -373,15 +468,25 @@ def _parse_local_version(local): part.lower() if not part.isdigit() else int(part) for part in _local_version_separators.split(local) ) + return None + +def _cmpkey( + epoch, # type: int + release, # type: Tuple[int, ...] + pre, # type: Optional[Tuple[str, int]] + post, # type: Optional[Tuple[str, int]] + dev, # type: Optional[Tuple[str, int]] + local, # type: Optional[Tuple[SubLocalType]] +): + # type: (...) -> CmpKey -def _cmpkey(epoch, release, pre, post, dev, local): # When we compare a release version, we want to compare it with all of the # trailing zeros removed. So we'll use a reverse the list, drop all the now # leading zeros until we come to something non zero, then take the rest # re-reverse it back into the correct order and make it a tuple and use # that for our sorting key. - release = tuple( + _release = tuple( reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) ) @@ -390,23 +495,31 @@ def _cmpkey(epoch, release, pre, post, dev, local): # if there is not a pre or a post segment. If we have one of those then # the normal sorting rules will handle this case correctly. if pre is None and post is None and dev is not None: - pre = -Infinity + _pre = NegativeInfinity # type: PrePostDevType # Versions without a pre-release (except as noted above) should sort after # those with one. elif pre is None: - pre = Infinity + _pre = Infinity + else: + _pre = pre # Versions without a post segment should sort before those with one. 
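
The new major/minor/micro properties default missing release components to
zero; a quick check, assuming the vendored packaging.version:

    from pipenv.patched.notpip._vendor.packaging.version import Version

    v = Version("3.8")
    print(v.major, v.minor, v.micro)   # 3 8 0
    print(Version("1!2.post1").major)  # 2 -- the epoch does not leak into major
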
if post is None: - post = -Infinity + _post = NegativeInfinity # type: PrePostDevType + + else: + _post = post # Versions without a development segment should sort after those with one. if dev is None: - dev = Infinity + _dev = Infinity # type: PrePostDevType + + else: + _dev = dev if local is None: # Versions without a local segment should sort before those with one. - local = -Infinity + _local = NegativeInfinity # type: LocalType else: # Versions with a local segment need that segment parsed to implement # the sorting rules in PEP440. @@ -415,6 +528,8 @@ def _cmpkey(epoch, release, pre, post, dev, local): # - Numeric segments sort numerically # - Shorter versions sort before longer versions when the prefixes # match exactly - local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) + _local = tuple( + (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local + ) - return epoch, release, pre, post, dev, local + return epoch, _release, _pre, _post, _dev, _local diff --git a/pipenv/patched/notpip/_vendor/pep517.pyi b/pipenv/patched/notpip/_vendor/pep517.pyi new file mode 100644 index 0000000000..d1ce810290 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pep517.pyi @@ -0,0 +1 @@ +from pep517 import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pkg_resources.pyi b/pipenv/patched/notpip/_vendor/pkg_resources.pyi new file mode 100644 index 0000000000..477030314f --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pkg_resources.pyi @@ -0,0 +1 @@ +from pkg_resources import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py b/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py index c13e11c159..b1cb5de7a6 100644 --- a/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py +++ b/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py @@ -88,8 +88,8 @@ __metaclass__ = type -if (3, 0) < sys.version_info < (3, 4): - raise RuntimeError("Python 3.4 or later is required") +if (3, 0) < sys.version_info < (3, 5): + raise RuntimeError("Python 3.5 or later is required") if six.PY2: # Those builtin exceptions are only defined in Python 3 @@ -333,7 +333,7 @@ class UnknownExtra(ResolutionError): _provider_factories = {} -PY_MAJOR = sys.version[:3] +PY_MAJOR = '{}.{}'.format(*sys.version_info) EGG_DIST = 3 BINARY_DIST = 2 SOURCE_DIST = 1 @@ -3109,6 +3109,7 @@ def __init__(self, requirement_string): self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, + self.url, self.specifier, frozenset(self.extras), str(self.marker) if self.marker else None, diff --git a/pipenv/patched/notpip/_vendor/progress.pyi b/pipenv/patched/notpip/_vendor/progress.pyi new file mode 100644 index 0000000000..c92de832bb --- /dev/null +++ b/pipenv/patched/notpip/_vendor/progress.pyi @@ -0,0 +1 @@ +from progress import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pyparsing.py b/pipenv/patched/notpip/_vendor/pyparsing.py index 1d47c4601b..ce3af0e5d9 100644 --- a/pipenv/patched/notpip/_vendor/pyparsing.py +++ b/pipenv/patched/notpip/_vendor/pyparsing.py @@ -95,8 +95,8 @@ namespace class """ -__version__ = "2.4.2" -__versionTime__ = "29 Jul 2019 02:58 UTC" +__version__ = "2.4.6" +__versionTime__ = "24 Dec 2019 04:27 UTC" __author__ = "Paul McGuire " import string @@ -114,6 +114,7 @@ from operator import itemgetter import itertools from functools import wraps +from contextlib import contextmanager try: # Python 3 @@ -184,8 +185,15 @@ class SimpleNamespace: pass 
__diag__.warn_name_set_on_empty_Forward = False __diag__.warn_on_multiple_string_args_to_oneof = False __diag__.enable_debug_on_named_expressions = False +__diag__._all_names = [nm for nm in vars(__diag__) if nm.startswith("enable_") or nm.startswith("warn_")] + +def _enable_all_warnings(): + __diag__.warn_multiple_tokens_in_named_alternation = True + __diag__.warn_ungrouped_named_tokens_in_collection = True + __diag__.warn_name_set_on_empty_Forward = True + __diag__.warn_on_multiple_string_args_to_oneof = True +__diag__.enable_all_warnings = _enable_all_warnings -# ~ sys.stderr.write("testing pyparsing module, version %s, %s\n" % (__version__, __versionTime__)) __all__ = ['__version__', '__versionTime__', '__author__', '__compat__', '__diag__', 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', @@ -206,7 +214,7 @@ class SimpleNamespace: pass 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass', 'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', - 'conditionAsParseAction', + 'conditionAsParseAction', 're', ] system_version = tuple(sys.version_info)[:3] @@ -2561,15 +2569,13 @@ def parseFile(self, file_or_filename, parseAll=False): raise exc def __eq__(self, other): - if isinstance(other, ParserElement): - if PY_3: - self is other or super(ParserElement, self).__eq__(other) - else: - return self is other or vars(self) == vars(other) + if self is other: + return True elif isinstance(other, basestring): return self.matches(other) - else: - return super(ParserElement, self) == other + elif isinstance(other, ParserElement): + return vars(self) == vars(other) + return False def __ne__(self, other): return not (self == other) @@ -3252,14 +3258,23 @@ class Regex(Token): If the given regex contains named groups (defined using ``(?P...)``), these will be preserved as named parse results. + If instead of the Python stdlib re module you wish to use a different RE module + (such as the `regex` module), you can replace it by either building your + Regex object with a compiled RE that was compiled using regex: + Example:: realnum = Regex(r"[+-]?\d+\.\d*") date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)') # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + + # use regex module instead of stdlib re module to construct a Regex using + # a compiled regular expression + import regex + parser = pp.Regex(regex.compile(r'[0-9]')) + """ - compiledREtype = type(re.compile("[A-Z]")) def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): """The parameters ``pattern`` and ``flags`` are passed to the ``re.compile()`` function as-is. 
See the Python @@ -3284,13 +3299,13 @@ def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): SyntaxWarning, stacklevel=2) raise - elif isinstance(pattern, Regex.compiledREtype): + elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'): self.re = pattern - self.pattern = self.reString = str(pattern) + self.pattern = self.reString = pattern.pattern self.flags = flags else: - raise ValueError("Regex may only be constructed with a string or a compiled RE object") + raise TypeError("Regex may only be constructed with a string or a compiled RE object") self.re_match = self.re.match @@ -3617,24 +3632,24 @@ class White(Token): '\n': '', '\r': '', '\f': '', - 'u\00A0': '', - 'u\1680': '', - 'u\180E': '', - 'u\2000': '', - 'u\2001': '', - 'u\2002': '', - 'u\2003': '', - 'u\2004': '', - 'u\2005': '', - 'u\2006': '', - 'u\2007': '', - 'u\2008': '', - 'u\2009': '', - 'u\200A': '', - 'u\200B': '', - 'u\202F': '', - 'u\205F': '', - 'u\3000': '', + u'\u00A0': '', + u'\u1680': '', + u'\u180E': '', + u'\u2000': '', + u'\u2001': '', + u'\u2002': '', + u'\u2003': '', + u'\u2004': '', + u'\u2005': '', + u'\u2006': '', + u'\u2007': '', + u'\u2008': '', + u'\u2009': '', + u'\u200A': '', + u'\u200B': '', + u'\u202F': '', + u'\u205F': '', + u'\u3000': '', } def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): super(White, self).__init__() @@ -4566,6 +4581,7 @@ def __init__(self, expr, retreat=None): self.retreat = retreat self.errmsg = "not preceded by " + str(expr) self.skipWhitespace = False + self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) def parseImpl(self, instring, loc=0, doActions=True): if self.exact: @@ -4576,19 +4592,18 @@ def parseImpl(self, instring, loc=0, doActions=True): else: # retreat specified a maximum lookbehind window, iterate test_expr = self.expr + StringEnd() - instring_slice = instring[:loc] + instring_slice = instring[max(0, loc - self.retreat):loc] last_expr = ParseException(instring, loc, self.errmsg) - for offset in range(1, min(loc, self.retreat + 1)): + for offset in range(1, min(loc, self.retreat + 1)+1): try: - _, ret = test_expr._parse(instring_slice, loc - offset) + # print('trying', offset, instring_slice, repr(instring_slice[loc - offset:])) + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) except ParseBaseException as pbe: last_expr = pbe else: break else: raise last_expr - # return empty list of tokens, but preserve any defined results names - del ret[:] return loc, ret @@ -6051,7 +6066,7 @@ def parseImpl(self, instring, loc, doActions=True): matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr)) elif arity == 3: matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) - + Group(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr)) + + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr))) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") elif rightLeftAssoc == opAssoc.RIGHT: @@ -6305,18 +6320,18 @@ def checkUnindent(s, l, t): if curCol < indentStack[-1]: indentStack.pop() - NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress(), stopOn=StringEnd()) INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') PEER = Empty().setParseAction(checkPeerIndent).setName('') UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') if indent: smExpr = Group(Optional(NL) + INDENT - + OneOrMore(PEER + Group(blockStatementExpr) + 
Optional(NL)) + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + UNDENT) else: smExpr = Group(Optional(NL) - + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL)) + + OneOrMore(PEER + Group(blockStatementExpr) + Optional(NL), stopOn=StringEnd()) + UNDENT) smExpr.setFailAction(lambda a, b, c, d: reset_stack()) blockStatementExpr.ignore(_bslash + LineEnd()) @@ -6822,6 +6837,187 @@ class Devanagari(unicode_set): setattr(pyparsing_unicode, u"देवनागरी", pyparsing_unicode.Devanagari) +class pyparsing_test: + """ + namespace class for classes useful in writing unit tests + """ + + class reset_pyparsing_context: + """ + Context manager to be used when writing unit tests that modify pyparsing config values: + - packrat parsing + - default whitespace characters. + - default keyword characters + - literal string auto-conversion class + - __diag__ settings + + Example: + with reset_pyparsing_context(): + # test that literals used to construct a grammar are automatically suppressed + ParserElement.inlineLiteralsUsing(Suppress) + + term = Word(alphas) | Word(nums) + group = Group('(' + term[...] + ')') + + # assert that the '()' characters are not included in the parsed tokens + self.assertParseAndCheckLisst(group, "(abc 123 def)", ['abc', '123', 'def']) + + # after exiting context manager, literals are converted to Literal expressions again + """ + + def __init__(self): + self._save_context = {} + + def save(self): + self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS + self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS + self._save_context[ + "literal_string_class" + ] = ParserElement._literalStringClass + self._save_context["packrat_enabled"] = ParserElement._packratEnabled + self._save_context["packrat_parse"] = ParserElement._parse + self._save_context["__diag__"] = { + name: getattr(__diag__, name) for name in __diag__._all_names + } + self._save_context["__compat__"] = { + "collect_all_And_tokens": __compat__.collect_all_And_tokens + } + return self + + def restore(self): + # reset pyparsing global state + if ( + ParserElement.DEFAULT_WHITE_CHARS + != self._save_context["default_whitespace"] + ): + ParserElement.setDefaultWhitespaceChars( + self._save_context["default_whitespace"] + ) + Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"] + ParserElement.inlineLiteralsUsing( + self._save_context["literal_string_class"] + ) + for name, value in self._save_context["__diag__"].items(): + setattr(__diag__, name, value) + ParserElement._packratEnabled = self._save_context["packrat_enabled"] + ParserElement._parse = self._save_context["packrat_parse"] + __compat__.collect_all_And_tokens = self._save_context["__compat__"] + + def __enter__(self): + return self.save() + + def __exit__(self, *args): + return self.restore() + + class TestParseResultsAsserts: + """ + A mixin class to add parse results assertion methods to normal unittest.TestCase classes. + """ + def assertParseResultsEquals( + self, result, expected_list=None, expected_dict=None, msg=None + ): + """ + Unit test assertion to compare a ParseResults object with an optional expected_list, + and compare any defined results names with an optional expected_dict. 
+ """ + if expected_list is not None: + self.assertEqual(expected_list, result.asList(), msg=msg) + if expected_dict is not None: + self.assertEqual(expected_dict, result.asDict(), msg=msg) + + def assertParseAndCheckList( + self, expr, test_string, expected_list, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asList() is equal to the expected_list. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_list=expected_list, msg=msg) + + def assertParseAndCheckDict( + self, expr, test_string, expected_dict, msg=None, verbose=True + ): + """ + Convenience wrapper assert to test a parser element and input string, and assert that + the resulting ParseResults.asDict() is equal to the expected_dict. + """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults( + self, run_tests_report, expected_parse_results=None, msg=None + ): + """ + Unit test assertion to evaluate output of ParserElement.runTests(). If a list of + list-dict tuples is given as the expected_parse_results argument, then these are zipped + with the report tuples returned by runTests and evaluated using assertParseResultsEquals. + Finally, asserts that the overall runTests() success value is True. + + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + + if expected_parse_results is not None: + merged = [ + (rpt[0], rpt[1], expected) + for rpt, expected in zip(run_test_results, expected_parse_results) + ] + for test_string, result, expected in merged: + # expected should be a tuple containing a list and/or a dict or an exception, + # and optional failure message string + # an empty tuple will skip any result validation + fail_msg = next( + (exp for exp in expected if isinstance(exp, str)), None + ) + expected_exception = next( + ( + exp + for exp in expected + if isinstance(exp, type) and issubclass(exp, Exception) + ), + None, + ) + if expected_exception is not None: + with self.assertRaises( + expected_exception=expected_exception, msg=fail_msg or msg + ): + if isinstance(result, Exception): + raise result + else: + expected_list = next( + (exp for exp in expected if isinstance(exp, list)), None + ) + expected_dict = next( + (exp for exp in expected if isinstance(exp, dict)), None + ) + if (expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals( + result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=fail_msg or msg, + ) + else: + # warning here maybe? 
+ print("no validation for {!r}".format(test_string)) + + # do this last, in case some specific test results can be reported instead + self.assertTrue( + run_test_success, msg=msg if msg is not None else "failed runTests" + ) + + @contextmanager + def assertRaisesParseException(self, exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield + + if __name__ == "__main__": selectToken = CaselessLiteral("select") diff --git a/pipenv/patched/notpip/_vendor/pyparsing.pyi b/pipenv/patched/notpip/_vendor/pyparsing.pyi new file mode 100644 index 0000000000..8e9de6b021 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pyparsing.pyi @@ -0,0 +1 @@ +from pyparsing import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pytoml.pyi b/pipenv/patched/notpip/_vendor/pytoml.pyi new file mode 100644 index 0000000000..5566ee8972 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pytoml.pyi @@ -0,0 +1 @@ +from pytoml import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/requests.pyi b/pipenv/patched/notpip/_vendor/requests.pyi new file mode 100644 index 0000000000..6d69cd6fac --- /dev/null +++ b/pipenv/patched/notpip/_vendor/requests.pyi @@ -0,0 +1 @@ +from requests import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/retrying.pyi b/pipenv/patched/notpip/_vendor/retrying.pyi new file mode 100644 index 0000000000..90f20c6dbc --- /dev/null +++ b/pipenv/patched/notpip/_vendor/retrying.pyi @@ -0,0 +1 @@ +from retrying import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/six.LICENSE b/pipenv/patched/notpip/_vendor/six.LICENSE index 365d10741b..de6633112c 100644 --- a/pipenv/patched/notpip/_vendor/six.LICENSE +++ b/pipenv/patched/notpip/_vendor/six.LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2010-2018 Benjamin Peterson +Copyright (c) 2010-2020 Benjamin Peterson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/pipenv/patched/notpip/_vendor/six.py b/pipenv/patched/notpip/_vendor/six.py index 89b2188fd6..5fe9f8e141 100644 --- a/pipenv/patched/notpip/_vendor/six.py +++ b/pipenv/patched/notpip/_vendor/six.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2018 Benjamin Peterson +# Copyright (c) 2010-2020 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -29,7 +29,7 @@ import types __author__ = "Benjamin Peterson " -__version__ = "1.12.0" +__version__ = "1.14.0" # Useful for very coarse version differentiation. 
@@ -255,9 +255,11 @@ class _MovedItems(_LazyModule): MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), @@ -637,13 +639,16 @@ def u(s): import io StringIO = io.StringIO BytesIO = io.BytesIO + del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" else: def b(s): return s @@ -665,6 +670,7 @@ def indexbytes(buf, i): _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") @@ -681,6 +687,10 @@ def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + if PY3: exec_ = getattr(moves.builtins, "exec") @@ -716,16 +726,7 @@ def exec_(_code_, _globs_=None, _locs_=None): """) -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from from_value - finally: - value = None -""") -elif sys.version_info[:2] > (3, 2): +if sys.version_info[:2] > (3,): exec_("""def raise_from(value, from_value): try: raise value from from_value @@ -805,13 +806,33 @@ def print_(*args, **kwargs): _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. 
+ def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + else: wraps = functools.wraps @@ -824,7 +845,15 @@ def with_metaclass(meta, *bases): class metaclass(type): def __new__(cls, name, this_bases, d): - return meta(name, bases, d) + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). + resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) @classmethod def __prepare__(cls, name, this_bases): @@ -908,10 +937,9 @@ def ensure_text(s, encoding='utf-8', errors='strict'): raise TypeError("not expecting type '%s'" % type(s)) - def python_2_unicode_compatible(klass): """ - A decorator that defines __unicode__ and __str__ methods under Python 2. + A class decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method diff --git a/pipenv/patched/notpip/_vendor/six/__init__.pyi b/pipenv/patched/notpip/_vendor/six/__init__.pyi new file mode 100644 index 0000000000..e5c0e24227 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/six/__init__.pyi @@ -0,0 +1 @@ +from six import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/six/moves/__init__.pyi b/pipenv/patched/notpip/_vendor/six/moves/__init__.pyi new file mode 100644 index 0000000000..7a82f79db6 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/six/moves/__init__.pyi @@ -0,0 +1 @@ +from six.moves import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/six/moves/configparser.pyi b/pipenv/patched/notpip/_vendor/six/moves/configparser.pyi new file mode 100644 index 0000000000..f77b3f4105 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/six/moves/configparser.pyi @@ -0,0 +1 @@ +from six.moves.configparser import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/urllib3.pyi b/pipenv/patched/notpip/_vendor/urllib3.pyi new file mode 100644 index 0000000000..7e8a2a7085 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/urllib3.pyi @@ -0,0 +1 @@ +from urllib3 import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/urllib3/__init__.py b/pipenv/patched/notpip/_vendor/urllib3/__init__.py index 8f5a21f346..96474d3680 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/__init__.py +++ b/pipenv/patched/notpip/_vendor/urllib3/__init__.py @@ -22,7 +22,7 @@ __author__ = "Andrey Petrov (andrey.petrov@shazow.net)" __license__ = "MIT" -__version__ = "1.25.6" +__version__ = "1.25.7" __all__ = ( "HTTPConnectionPool", diff --git a/pipenv/patched/notpip/_vendor/urllib3/connection.py 
b/pipenv/patched/notpip/_vendor/urllib3/connection.py index 3eeb1af58e..f5c946adf7 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/connection.py +++ b/pipenv/patched/notpip/_vendor/urllib3/connection.py @@ -412,7 +412,7 @@ def connect(self): ( "Certificate for {0} has no `subjectAltName`, falling back to check for a " "`commonName` for now. This feature is being removed by major browsers and " - "deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " "for details.)".format(hostname) ), SubjectAltNameWarning, @@ -430,7 +430,7 @@ def _match_hostname(cert, asserted_hostname): match_hostname(cert, asserted_hostname) except CertificateError as e: log.warning( - "Certificate did not match expected hostname: %s. " "Certificate: %s", + "Certificate did not match expected hostname: %s. Certificate: %s", asserted_hostname, cert, ) diff --git a/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py b/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py index e73fa57a42..31696460f0 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py +++ b/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py @@ -257,7 +257,7 @@ def _get_conn(self, timeout=None): if self.block: raise EmptyPoolError( self, - "Pool reached maximum size and no more " "connections are allowed.", + "Pool reached maximum size and no more connections are allowed.", ) pass # Oh well, we'll create a new connection then @@ -626,7 +626,7 @@ def urlopen( # # See issue #651 [1] for details. # - # [1] + # [1] release_this_conn = release_conn # Merge the proxy headers. Only do this in HTTP. We have to copy the @@ -742,10 +742,7 @@ def urlopen( if not conn: # Try again log.warning( - "Retrying (%r) after connection " "broken by '%r': %s", - retries, - err, - url, + "Retrying (%r) after connection broken by '%r': %s", retries, err, url ) return self.urlopen( method, @@ -758,6 +755,7 @@ def urlopen( timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, + chunked=chunked, body_pos=body_pos, **response_kw ) @@ -809,6 +807,7 @@ def drain_and_release_conn(response): timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, + chunked=chunked, body_pos=body_pos, **response_kw ) @@ -842,6 +841,7 @@ def drain_and_release_conn(response): timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, + chunked=chunked, body_pos=body_pos, **response_kw ) @@ -961,7 +961,7 @@ def _new_conn(self): if not self.ConnectionCls or self.ConnectionCls is DummyConnection: raise SSLError( - "Can't connect to HTTPS URL because the SSL " "module is not available." + "Can't connect to HTTPS URL because the SSL module is not available." ) actual_host = self.host diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/_appengine_environ.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/_appengine_environ.py index c909010bf2..119efaeeb6 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/_appengine_environ.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/_appengine_environ.py @@ -6,27 +6,31 @@ def is_appengine(): - return is_local_appengine() or is_prod_appengine() or is_prod_appengine_mvms() + return "APPENGINE_RUNTIME" in os.environ def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() + """Reports if the app is running in the first generation sandbox. 
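
The three connectionpool hunks above fix a real bug: urlopen() recurses into
itself for retries and redirects, and before 1.25.7 it dropped the chunked
flag on every recursive call, so a retried chunked request was silently
re-sent un-chunked. A sketch of a request shape that exercises the fixed path
(hypothetical endpoint, will not resolve):

    from pipenv.patched.notpip._vendor import urllib3

    http = urllib3.PoolManager(retries=urllib3.Retry(total=3, redirect=2))
    resp = http.request(
        "POST",
        "https://example.invalid/upload",   # hypothetical endpoint
        body=iter([b"part-1", b"part-2"]),  # iterator body => chunked encoding
        chunked=True,                       # now preserved across retries
    )
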
+ + The second generation runtimes are technically still in a sandbox, but it + is much less restrictive, so generally you shouldn't need to check for it. + see https://cloud.google.com/appengine/docs/standard/runtimes + """ + return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" def is_local_appengine(): - return ( - "APPENGINE_RUNTIME" in os.environ - and "Development/" in os.environ["SERVER_SOFTWARE"] + return is_appengine() and os.environ.get("SERVER_SOFTWARE", "").startswith( + "Development/" ) def is_prod_appengine(): - return ( - "APPENGINE_RUNTIME" in os.environ - and "Google App Engine/" in os.environ["SERVER_SOFTWARE"] - and not is_prod_appengine_mvms() + return is_appengine() and os.environ.get("SERVER_SOFTWARE", "").startswith( + "Google App Engine/" ) def is_prod_appengine_mvms(): - return os.environ.get("GAE_VM", False) == "true" + """Deprecated.""" + return False diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/bindings.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/bindings.py index b46e1e3b5d..d9b6733318 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/bindings.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/bindings.py @@ -415,6 +415,7 @@ class SecurityConst(object): kTLSProtocol1 = 4 kTLSProtocol11 = 7 kTLSProtocol12 = 8 + # SecureTransport does not support TLS 1.3 even if there's a constant for it kTLSProtocol13 = 10 kTLSProtocolMaxSupported = 999 diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py index d8716b9f9e..b3e313ebc3 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py @@ -108,13 +108,6 @@ def __init__( "URLFetch is not available in this environment." ) - if is_prod_appengine_mvms(): - raise AppEnginePlatformError( - "Use normal urllib3.PoolManager instead of AppEngineManager" - "on Managed VMs, as using URLFetch is not necessary in " - "this environment." - ) - warnings.warn( "urllib3 is using URLFetch on Google App Engine sandbox instead " "of sockets. To use sockets directly instead of URLFetch see " diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/ntlmpool.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/ntlmpool.py index 9c96be29d8..1fd242a6e0 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/ntlmpool.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/ntlmpool.py @@ -96,9 +96,7 @@ def _new_conn(self): log.debug("Response data: %s [...]", res.read()[:100]) if res.status != 200: if res.status == 401: - raise Exception( - "Server rejected request: wrong " "username or password" - ) + raise Exception("Server rejected request: wrong username or password") raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) res.fp = None diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py index 24e6b5c4d9..87d844afa7 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py @@ -144,13 +144,10 @@ ] # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of -# TLSv1 and a high of TLSv1.3. For everything else, we pin to that version. -# TLSv1 to 1.2 are supported on macOS 10.8+ and TLSv1.3 is macOS 10.13+ +# TLSv1 and a high of TLSv1.2. 
For everything else, we pin to that version. +# TLSv1 to 1.2 are supported on macOS 10.8+ _protocol_to_min_max = { - util.PROTOCOL_TLS: ( - SecurityConst.kTLSProtocol1, - SecurityConst.kTLSProtocolMaxSupported, - ) + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12) } if hasattr(ssl, "PROTOCOL_SSLv2"): @@ -488,15 +485,7 @@ def handshake( result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) - # TLS 1.3 isn't necessarily enabled by the OS - # so we have to detect when we error out and try - # setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported - # was added in macOS 10.13 along with kTLSProtocol13. result = Security.SSLSetProtocolVersionMax(self.context, max_version) - if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported: - result = Security.SSLSetProtocolVersionMax( - self.context, SecurityConst.kTLSProtocol12 - ) _assert_no_error(result) # If there's a trust DB, we need to use it. We do that by telling @@ -707,7 +696,7 @@ def version(self): ) _assert_no_error(result) if protocol.value == SecurityConst.kTLSProtocol13: - return "TLSv1.3" + raise ssl.SSLError("SecureTransport does not support TLS 1.3") elif protocol.value == SecurityConst.kTLSProtocol12: return "TLSv1.2" elif protocol.value == SecurityConst.kTLSProtocol11: diff --git a/pipenv/patched/notpip/_vendor/urllib3/exceptions.py b/pipenv/patched/notpip/_vendor/urllib3/exceptions.py index 93d93fba7d..0a74c79b5e 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/exceptions.py +++ b/pipenv/patched/notpip/_vendor/urllib3/exceptions.py @@ -222,7 +222,7 @@ def __init__(self, partial, expected): super(IncompleteRead, self).__init__(partial, expected) def __repr__(self): - return "IncompleteRead(%i bytes read, " "%i more expected)" % ( + return "IncompleteRead(%i bytes read, %i more expected)" % ( self.partial, self.expected, ) diff --git a/pipenv/patched/notpip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/pipenv/patched/notpip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py index 507c655d7f..cf5b733c1e 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/pipenv/patched/notpip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -153,10 +153,8 @@ def match_hostname(cert, hostname): "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) ) elif len(dnsnames) == 1: - raise CertificateError( - "hostname %r " "doesn't match %r" % (hostname, dnsnames[0]) - ) + raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) else: raise CertificateError( - "no appropriate commonName or " "subjectAltName fields were found" + "no appropriate commonName or subjectAltName fields were found" ) diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/connection.py b/pipenv/patched/notpip/_vendor/urllib3/util/connection.py index 0e1112628e..86f0a3b00e 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/connection.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/connection.py @@ -121,7 +121,7 @@ def _has_ipv6(host): # has_ipv6 returns true if cPython was compiled with IPv6 support. # It does not tell us if the system has IPv6 support enabled. To # determine that we must bind to an IPv6 address. 
- # https://github.com/shazow/urllib3/pull/611 + # https://github.com/urllib3/urllib3/pull/611 # https://bugs.python.org/issue658327 try: sock = socket.socket(socket.AF_INET6) diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/request.py b/pipenv/patched/notpip/_vendor/urllib3/util/request.py index 262a6d6185..3b7bb54daf 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/request.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/request.py @@ -122,7 +122,7 @@ def rewind_body(body, body_pos): body_seek(body_pos) except (IOError, OSError): raise UnrewindableBodyError( - "An error occurred when rewinding request " "body for redirect/retry." + "An error occurred when rewinding request body for redirect/retry." ) elif body_pos is _FAILEDTELL: raise UnrewindableBodyError( @@ -131,5 +131,5 @@ def rewind_body(body, body_pos): ) else: raise ValueError( - "body_pos must be of type integer, " "instead it was %s." % type(body_pos) + "body_pos must be of type integer, instead it was %s." % type(body_pos) ) diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/timeout.py b/pipenv/patched/notpip/_vendor/urllib3/util/timeout.py index c1dc1e9712..9883700556 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/timeout.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/timeout.py @@ -203,7 +203,7 @@ def get_connect_duration(self): """ if self._start_connect is None: raise TimeoutStateError( - "Can't get connect duration for timer " "that has not started." + "Can't get connect duration for timer that has not started." ) return current_time() - self._start_connect diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/url.py b/pipenv/patched/notpip/_vendor/urllib3/util/url.py index 007157aeb8..6e8e8c70dc 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/url.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/url.py @@ -55,7 +55,7 @@ ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" -TARGET_RE = re.compile(r"^(/[^?]*)(?:\?([^#]+))?(?:#(.*))?$") +TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") IPV4_RE = re.compile("^" + IPV4_PAT + "$") IPV6_RE = re.compile("^" + IPV6_PAT + "$") @@ -325,14 +325,11 @@ def _encode_target(target): if not target.startswith("/"): return target - path, query, fragment = TARGET_RE.match(target).groups() + path, query = TARGET_RE.match(target).groups() target = _encode_invalid_chars(path, PATH_CHARS) query = _encode_invalid_chars(query, QUERY_CHARS) - fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) if query is not None: target += "?" 
+ query - if fragment is not None: - target += "#" + target return target diff --git a/pipenv/patched/notpip/_vendor/vendor.txt b/pipenv/patched/notpip/_vendor/vendor.txt index aadd35261a..cbc2830ac0 100644 --- a/pipenv/patched/notpip/_vendor/vendor.txt +++ b/pipenv/patched/notpip/_vendor/vendor.txt @@ -1,23 +1,23 @@ appdirs==1.4.3 -CacheControl==0.12.5 -colorama==0.4.1 +CacheControl==0.12.6 +colorama==0.4.3 contextlib2==0.6.0 -distlib==0.2.9.post0 +distlib==0.3.0 distro==1.4.0 html5lib==1.0.1 -ipaddress==1.0.22 # Only needed on 2.6 and 2.7 +ipaddress==1.0.23 # Only needed on 2.6 and 2.7 msgpack==0.6.2 -packaging==19.2 +packaging==20.1 pep517==0.7.0 progress==1.5 -pyparsing==2.4.2 +pyparsing==2.4.6 pytoml==0.1.21 requests==2.22.0 - certifi==2019.9.11 + certifi==2019.11.28 chardet==3.0.4 idna==2.8 - urllib3==1.25.6 + urllib3==1.25.7 retrying==1.3.3 -setuptools==41.4.0 -six==1.12.0 +setuptools==44.0.0 +six==1.14.0 webencodings==0.5.1 diff --git a/pipenv/patched/notpip/_vendor/webencodings.pyi b/pipenv/patched/notpip/_vendor/webencodings.pyi new file mode 100644 index 0000000000..a11db4d82c --- /dev/null +++ b/pipenv/patched/notpip/_vendor/webencodings.pyi @@ -0,0 +1 @@ +from webencodings import * \ No newline at end of file diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py index fd8ecddd7f..4f7efbf829 100644 --- a/pipenv/patched/piptools/_compat/__init__.py +++ b/pipenv/patched/piptools/_compat/__init__.py @@ -27,15 +27,13 @@ get_installed_distributions, install_req_from_editable, install_req_from_line, - is_dir_url, - is_file_url, - is_vcs_url, parse_requirements, path_to_url, pip_version, stdlib_pkgs, url_to_path, user_cache_dir, + normalize_path, ) if six.PY2: diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py index 765bd49ed4..ea51421088 100644 --- a/pipenv/patched/piptools/_compat/pip_compat.py +++ b/pipenv/patched/piptools/_compat/pip_compat.py @@ -34,6 +34,12 @@ def do_import(module_path, subimport=None, old_path=None): else: return getattr(imported, package) +if PIP_VERSION[:2] <= (20, 0): + def install_req_from_parsed_requirement(req, **kwargs): + return req + +else: + from pipenv.patched.notpip._internal.req.constructors import install_req_from_parsed_requirement InstallRequirement = pip_shims.shims.InstallRequirement InstallationError = pip_shims.shims.InstallationError @@ -58,37 +64,14 @@ def do_import(module_path, subimport=None, old_path=None): VcsSupport = pip_shims.shims.VcsSupport WheelCache = pip_shims.shims.WheelCache pip_version = pip_shims.shims.pip_version - -# pip 18.1 has refactored InstallRequirement constructors use by pip-tools. 
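The shim added earlier in this hunk is the load-bearing pattern of this update: gate on PIP_VERSION once at import time instead of probing attributes at every call site. A minimal self-contained sketch of the same idea (the direct `pip._internal` import and the literal PIP_VERSION value are illustrative assumptions; the patched module resolves both through pip_shims):

    PIP_VERSION = (20, 0, 2)  # illustrative; normally derived from pip itself

    if PIP_VERSION[:2] <= (20, 0):
        # pip <= 20.0: parse_requirements() already yields InstallRequirement
        # objects, so converting a parsed requirement is the identity.
        def install_req_from_parsed_requirement(req, **kwargs):
            return req
    else:
        # pip >= 20.1: parse_requirements() yields ParsedRequirement records
        # that need an explicit conversion step.
        from pip._internal.req.constructors import (
            install_req_from_parsed_requirement,
        )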
-if PIP_VERSION < (18, 1):
-    install_req_from_line = InstallRequirement.from_line
-    install_req_from_editable = InstallRequirement.from_editable
-else:
-    install_req_from_line = do_import("req.constructors", "install_req_from_line")
-    install_req_from_editable = do_import(
-        "req.constructors", "install_req_from_editable"
-    )
-
-
-def is_vcs_url(link):
-    if PIP_VERSION < (19, 3):
-        _is_vcs_url = do_import("download", "is_vcs_url")
-        return _is_vcs_url(link)
-
-    return link.is_vcs
-
-
-def is_file_url(link):
-    if PIP_VERSION < (19, 3):
-        _is_file_url = do_import("download", "is_file_url")
-        return _is_file_url(link)
-
-    return link.is_file
-
-
-def is_dir_url(link):
-    if PIP_VERSION < (19, 3):
-        _is_dir_url = do_import("download", "is_dir_url")
-        return _is_dir_url(link)
-
-    return link.is_existing_dir()
+normalize_path = do_import("utils.misc", "normalize_path")
+install_req_from_line = pip_shims.shims.install_req_from_line
+install_req_from_editable = pip_shims.shims.install_req_from_editable
+
+def parse_requirements(
+    filename, session, finder=None, options=None, constraint=False, isolated=False
+):
+    for parsed_req in _parse_requirements(
+        filename, session, finder=finder, options=options, constraint=constraint
+    ):
+        yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
diff --git a/pipenv/patched/piptools/cache.py b/pipenv/patched/piptools/cache.py
index 14a276db95..983ddb6f25 100644
--- a/pipenv/patched/piptools/cache.py
+++ b/pipenv/patched/piptools/cache.py
@@ -3,14 +3,25 @@
 
 import json
 import os
+import platform
 import sys
 
-from pipenv.patched.notpip._vendor.packaging.requirements import Requirement
+from pipenv.vendor.packaging.requirements import Requirement
 
 from .exceptions import PipToolsError
-from .locations import CACHE_DIR
 from .utils import as_tuple, key_from_req, lookup_table
 
+_PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"}
+
+
+def _implementation_name():
+    """Similar to PEP 425, but the minor version is separated from the
+    major to differentiate "3.10" and "31.0".
+    """
+    implementation_name = platform.python_implementation().lower()
+    implementation = _PEP425_PY_TAGS.get(implementation_name, "??")
+    return "{}{}.{}".format(implementation, *sys.version_info)
+
 
 class CorruptCacheError(PipToolsError):
     def __init__(self, path):
@@ -46,16 +57,14 @@ class DependencyCache(object):
 
         ~/.cache/pip-tools/depcache-pyX.Y.json
 
+    Where py indicates the Python implementation.
     Where X.Y indicates the Python version.
    """
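A quick sketch of what the new cache naming produces at runtime, using the `_implementation_name()` helper defined in the hunk above (assuming CPython 3.8 purely for illustration):

    >>> _implementation_name()
    'cp3.8'
    >>> "depcache-{}.json".format(_implementation_name())
    'depcache-cp3.8.json'

Implementations outside _PEP425_PY_TAGS fall back to the "??" tag rather than failing.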
""" - def __init__(self, cache_dir=None): - if cache_dir is None: - cache_dir = CACHE_DIR + def __init__(self, cache_dir): if not os.path.isdir(cache_dir): os.makedirs(cache_dir) - py_version = ".".join(str(digit) for digit in sys.version_info[:2]) - cache_filename = "depcache-py{}.json".format(py_version) + cache_filename = "depcache-{}.json".format(_implementation_name()) self._cache_file = os.path.join(cache_dir, cache_filename) self._cache = None diff --git a/pipenv/patched/piptools/locations.py b/pipenv/patched/piptools/locations.py index 7abf5c76a8..36cc538268 100644 --- a/pipenv/patched/piptools/locations.py +++ b/pipenv/patched/piptools/locations.py @@ -2,6 +2,7 @@ from shutil import rmtree from ._compat import user_cache_dir + from .click import secho # The user_cache_dir helper comes straight from pipenv.patched.notpip itself diff --git a/pipenv/patched/piptools/logging.py b/pipenv/patched/piptools/logging.py index 488a8a2e5e..8b379b8d74 100644 --- a/pipenv/patched/piptools/logging.py +++ b/pipenv/patched/piptools/logging.py @@ -1,8 +1,14 @@ # coding: utf-8 from __future__ import absolute_import, division, print_function, unicode_literals +import logging + from . import click +# Initialise the builtin logging module for other component using it. +# Ex: pip +logging.basicConfig() + class LogContext(object): def __init__(self, verbosity=0): diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py index c1bcf9d1ba..1aa29f0e13 100644 --- a/pipenv/patched/piptools/repositories/local.py +++ b/pipenv/patched/piptools/repositories/local.py @@ -3,10 +3,10 @@ from contextlib import contextmanager -from .._compat import FAVORITE_HASH +from .._compat import PIP_VERSION, FAVORITE_HASH from .base import BaseRepository -from piptools.utils import as_tuple, key_from_req, make_install_requirement +from piptools.utils import as_tuple, key_from_ireq, make_install_requirement def ireq_satisfied_by_existing_pin(ireq, existing_pin): @@ -15,7 +15,9 @@ def ireq_satisfied_by_existing_pin(ireq, existing_pin): previously encountered version pin. 
""" version = next(iter(existing_pin.req.specifier)).version - return version in ireq.req.specifier + return ireq.req.specifier.contains( + version, prereleases=existing_pin.req.specifier.prereleases + ) class LocalRequirementsRepository(BaseRepository): @@ -56,7 +58,7 @@ def freshen_build_caches(self): self.repository.freshen_build_caches() def find_best_match(self, ireq, prereleases=None): - key = key_from_req(ireq.req) + key = key_from_ireq(ireq) existing_pin = self.existing_pins.get(key) if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): project, version, _ = as_tuple(existing_pin) @@ -71,10 +73,13 @@ def get_dependencies(self, ireq): return self.repository.get_dependencies(ireq) def get_hashes(self, ireq): - key = key_from_req(ireq.req) + key = key_from_ireq(ireq) existing_pin = self.existing_pins.get(key) if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): - hashes = existing_pin.options.get("hashes", {}) + if PIP_VERSION[:2] <= (20, 0): + hashes = existing_pin.options.get("hashes", {}) + else: + hashes = existing_pin.hash_options hexdigests = hashes.get(FAVORITE_HASH) if hexdigests: return { diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index ff02d36c0c..fe6e7350b4 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -6,9 +6,14 @@ import hashlib import os from contextlib import contextmanager -from functools import partial from shutil import rmtree +from pip_shims.shims import ( + TempDirectory, + global_tempdir_manager, + get_requirement_tracker, + InstallCommand +) from packaging.requirements import Requirement from packaging.specifiers import Specifier, SpecifierSet @@ -18,31 +23,27 @@ InstallationError, InstallRequirement, Link, + normalize_path, PyPI, RequirementSet, RequirementTracker, - Resolver as PipResolver, SafeFileCache, TemporaryDirectory, VcsSupport, Wheel, WheelCache, contextlib, - is_dir_url, - is_file_url, - is_vcs_url, path_to_url, pip_version, url_to_path, ) -from ..cache import CACHE_DIR +from ..locations import CACHE_DIR from ..click import progressbar from ..exceptions import NoCandidateFound from ..logging import log from ..utils import ( dedup, clean_requires_python, - create_install_command, fs_str, is_pinned_requirement, is_url_requirement, @@ -51,7 +52,7 @@ ) from .base import BaseRepository -os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") +os.environ["PIP_SHIMS_BASE_MODULE"] = str("pip") FILE_CHUNK_SIZE = 4096 FileStream = collections.namedtuple("FileStream", "stream size") @@ -105,20 +106,28 @@ class PyPIRepository(BaseRepository): changed/configured on the Finder. """ - def __init__(self, pip_args, session=None, build_isolation=False, use_json=False): + def __init__(self, pip_args, cache_dir=CACHE_DIR, session=None, build_isolation=False, use_json=False): self.build_isolation = build_isolation self.use_json = use_json + self.cache_dir = cache_dir # Use pip's parser for pip.conf management and defaults. # General options (find_links, index_url, extra_index_url, trusted_host, # and pre) are deferred to pip. 
- command = create_install_command() - self.options, _ = command.parse_args(pip_args) + self.command = InstallCommand() + self.options, _ = self.command.parse_args(pip_args) + if self.build_isolation is not None: + self.options.build_isolation = build_isolation + if self.options.cache_dir: + self.options.cache_dir = normalize_path(self.options.cache_dir) + + self.options.require_hashes = False + self.options.ignore_dependencies = False if session is None: - session = command._build_session(self.options) + session = self.command._build_session(self.options) self.session = session - self.finder = command._build_package_finder( + self.finder = self.command._build_package_finder( options=self.options, session=self.session ) @@ -139,8 +148,9 @@ def __init__(self, pip_args, session=None, build_isolation=False, use_json=False # Setup file paths self.freshen_build_caches() - self._download_dir = fs_str(os.path.join(CACHE_DIR, "pkgs")) - self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, "wheels")) + self._cache_dir = normalize_path(cache_dir) + self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs")) + self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels")) def freshen_build_caches(self): """ @@ -191,26 +201,13 @@ def find_best_match(self, ireq, prereleases=None): if not matching_candidates: raise NoCandidateFound(ireq, all_candidates, self.finder) - if PIP_VERSION < (19, 1): - best_candidate = max( - matching_candidates, key=self.finder._candidate_sort_key - ) - elif PIP_VERSION < (19, 2): - evaluator = self.finder.candidate_evaluator - best_candidate = evaluator.get_best_candidate(matching_candidates) - elif PIP_VERSION < (19, 3): - evaluator = self.finder.make_candidate_evaluator(ireq.name) - best_candidate = evaluator.get_best_candidate(matching_candidates) - else: - evaluator = self.finder.make_candidate_evaluator(ireq.name) - best_candidate_result = evaluator.compute_best_candidate( - matching_candidates - ) - best_candidate = best_candidate_result.best_candidate + evaluator = self.finder.make_candidate_evaluator(ireq.name) + best_candidate_result = evaluator.compute_best_candidate(matching_candidates) + best_candidate = best_candidate_result.best_candidate # Turn the candidate into a pinned InstallRequirement return make_install_requirement( - best_candidate.project, + best_candidate.name, best_candidate.version, ireq.extras, ireq.markers, @@ -274,90 +271,39 @@ def gen(ireq): return set() def resolve_reqs(self, download_dir, ireq, wheel_cache): - results = None - ireq.isolated = self.build_isolation - ireq._wheel_cache = wheel_cache - if ireq and not ireq.link: - ireq.populate_link(self.finder, False, False) - if ireq.link and not ireq.link.is_wheel: - ireq.ensure_has_source_dir(self.source_dir) - - if PIP_VERSION < (10,): - reqset = RequirementSet( - self.build_dir, - self.source_dir, + with get_requirement_tracker() as req_tracker, TempDirectory( + kind="resolver" + ) as temp_dir: + preparer = self.command.make_requirement_preparer( + temp_build_dir=temp_dir, + options=self.options, + req_tracker=req_tracker, + session=self.session, + finder=self.finder, + use_user_site=False, download_dir=download_dir, wheel_download_dir=self._wheel_download_dir, - session=self.session, - ignore_installed=True, - ignore_compatibility=False, - wheel_cache=wheel_cache, ) - results = reqset._prepare_file(self.finder, ireq, ignore_requires_python=True) - else: - from pip_shims.shims import RequirementPreparer - - preparer_kwargs = { - "build_dir": self.build_dir, - 
"src_dir": self.source_dir, - "download_dir": download_dir, - "wheel_download_dir": self._wheel_download_dir, - "progress_bar": "off", - "build_isolation": self.build_isolation, - } - resolver_kwargs = { - "finder": self.finder, - "session": self.session, - "upgrade_strategy": "to-satisfy-only", - "force_reinstall": False, - "ignore_dependencies": False, - "ignore_requires_python": True, - "ignore_installed": True, - "use_user_site": False, - "ignore_compatibility": False, - "use_pep517": True, - } - make_install_req_kwargs = {"isolated": False, "wheel_cache": wheel_cache} - - if PIP_VERSION < (19, 3): - resolver_kwargs.update(**make_install_req_kwargs) - else: - from pipenv.vendor.pip_shims.shims import install_req_from_req_string - make_install_req = partial( - install_req_from_req_string, **make_install_req_kwargs - ) - resolver_kwargs["make_install_req"] = make_install_req - del resolver_kwargs["use_pep517"] - - if PIP_VERSION >= (20,): - preparer_kwargs["session"] = self.session - del resolver_kwargs["session"] - - resolver = None - preparer = None - reqset = None - with RequirementTracker() as req_tracker: - # Pip 18 uses a requirement tracker to prevent fork bombs - if req_tracker: - preparer_kwargs["req_tracker"] = req_tracker - preparer = RequirementPreparer(**preparer_kwargs) - resolver_kwargs["preparer"] = preparer - reqset = RequirementSet() - ireq.is_direct = True - - resolver = PipResolver(**resolver_kwargs) - require_hashes = False - if PIP_VERSION < (20,): - resolver.require_hashes = require_hashes - results = resolver._resolve_one(reqset, ireq) - else: - results = resolver._resolve_one(reqset, ireq, require_hashes) - try: - reqset.cleanup_files() - except (AttributeError, OSError): - pass + reqset = RequirementSet() + ireq.is_direct = True + reqset.add_requirement(ireq) + + resolver = self.command.make_resolver( + preparer=preparer, + finder=self.finder, + options=self.options, + wheel_cache=wheel_cache, + use_user_site=False, + ignore_installed=True, + ignore_requires_python=False, + force_reinstall=False, + upgrade_strategy="to-satisfy-only", + ) + results = resolver._resolve_one(reqset, ireq) + if PIP_VERSION[:2] <= (20, 0): + reqset.cleanup_files() results = set(results) if results else set() return results, ireq @@ -383,7 +329,7 @@ def get_legacy_dependencies(self, ireq): # If a download_dir is passed, pip will unnecessarely # archive the entire source directory download_dir = None - elif ireq.link and is_vcs_url(ireq.link): + elif ireq.link and ireq.link.is_vcs: # No download_dir for VCS sources. This also works around pip # using git-checkout-index, which gets rid of the .git dir. 
download_dir = None @@ -394,21 +340,24 @@ def get_legacy_dependencies(self, ireq): if not os.path.isdir(self._wheel_download_dir): os.makedirs(self._wheel_download_dir) - wheel_cache = WheelCache(CACHE_DIR, self.options.format_control) - prev_tracker = os.environ.get("PIP_REQ_TRACKER") - try: - results, ireq = self.resolve_reqs(download_dir, ireq, wheel_cache) - self._dependencies_cache[ireq] = results - finally: - if "PIP_REQ_TRACKER" in os.environ: - if prev_tracker: - os.environ["PIP_REQ_TRACKER"] = prev_tracker - else: - del os.environ["PIP_REQ_TRACKER"] - - # WheelCache.cleanup() introduced in pip==10.0.0 - if PIP_VERSION >= (10,): - wheel_cache.cleanup() + with global_tempdir_manager(): + wheel_cache = WheelCache(self._cache_dir, self.options.format_control) + prev_tracker = os.environ.get("PIP_REQ_TRACKER") + try: + results, ireq = self.resolve_reqs( + download_dir, ireq, wheel_cache + ) + self._dependencies_cache[ireq] = results + finally: + if "PIP_REQ_TRACKER" in os.environ: + if prev_tracker: + os.environ["PIP_REQ_TRACKER"] = prev_tracker + else: + del os.environ["PIP_REQ_TRACKER"] + + if PIP_VERSION[:2] <= (20, 0): + wheel_cache.cleanup() + return self._dependencies_cache[ireq] def get_hashes(self, ireq): @@ -421,7 +370,7 @@ def get_hashes(self, ireq): if ireq.link: link = ireq.link - if is_vcs_url(link) or (is_file_url(link) and is_dir_url(link)): + if link.is_vcs or (link.is_file and link.is_existing_dir()): # Return empty set for unhashable requirements. # Unhashable logic modeled on pip's # RequirementPreparer.prepare_linked_requirement @@ -451,14 +400,9 @@ def get_hashes(self, ireq): log.debug(" {}".format(ireq.name)) - def get_candidate_link(candidate): - if PIP_VERSION < (19, 2): - return candidate.location - return candidate.link - return { h for h in - map(lambda c: self._hash_cache.get_hash(get_candidate_link(c)), matching_candidates) + map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) if h is not None } @@ -507,7 +451,7 @@ def open_local_or_remote_file(link, session): """ url = link.url_without_fragment - if is_file_url(link): + if link.is_file: # Local URL local_path = url_to_path(url) if os.path.isdir(local_path): diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py index 7e856fe645..550069db7b 100644 --- a/pipenv/patched/piptools/resolver.py +++ b/pipenv/patched/piptools/resolver.py @@ -6,12 +6,11 @@ from functools import partial from itertools import chain, count +from pip_shims.shims import install_req_from_line from pipenv.vendor.requirementslib.models.markers import normalize_marker_str from packaging.markers import Marker from . 
import click -from ._compat import install_req_from_line -from .cache import DependencyCache from .logging import log from .utils import ( UNSAFE_PACKAGES, @@ -21,7 +20,6 @@ is_pinned_requirement, is_url_requirement, key_from_ireq, - key_from_req, ) green = partial(click.style, fg="green") @@ -35,7 +33,7 @@ class RequirementSummary(object): def __init__(self, ireq): self.req = ireq.req - self.key = key_from_req(ireq.req) + self.key = key_from_ireq(ireq) self.extras = str(sorted(ireq.extras)) self.markers = ireq.markers self.specifier = str(ireq.specifier) @@ -65,7 +63,8 @@ def combine_install_requirements(ireqs): combined_ireq = copy.deepcopy(source_ireqs[0]) for ireq in source_ireqs[1:]: # NOTE we may be losing some info on dropped reqs here - combined_ireq.req.specifier &= ireq.req.specifier + if combined_ireq.req is not None and ireq.req is not None: + combined_ireq.req.specifier &= ireq.req.specifier combined_ireq.constraint &= ireq.constraint if ireq.markers and not combined_ireq.markers: combined_ireq.markers = copy.deepcopy(ireq.markers) @@ -110,7 +109,7 @@ def __init__( self, constraints, repository, - cache=None, + cache, prereleases=False, clear_caches=False, allow_unsafe=False, @@ -123,8 +122,6 @@ def __init__( self.our_constraints = set(constraints) self.their_constraints = set() self.repository = repository - if cache is None: - cache = DependencyCache() # pragma: no cover self.dependency_cache = cache self.prereleases = prereleases self.clear_caches = clear_caches @@ -174,8 +171,8 @@ def resolve(self, max_rounds=10): raise RuntimeError( "No stable configuration of concrete packages " "could be found for the given constraints after " - "%d rounds of resolving.\n" - "This is likely a bug." % max_rounds + "{max_rounds} rounds of resolving.\n" + "This is likely a bug.".format(max_rounds=max_rounds) ) log.debug("") @@ -238,13 +235,6 @@ def _group_constraints(self, constraints): """ for _, ireqs in full_groupby(constraints, key=key_from_ireq): - ireqs = list(ireqs) - editable_ireq = next((ireq for ireq in ireqs if ireq.editable), None) - if editable_ireq: - # ignore all the other specs: the editable one is the one that counts - yield editable_ireq - continue - yield combine_install_requirements(ireqs) def _resolve_one_round(self): @@ -292,12 +282,10 @@ def _resolve_one_round(self): if has_changed: log.debug("") log.debug("New dependencies found in this round:") - for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)): + for new_dependency in sorted(diff, key=key_from_ireq): log.debug(" adding {}".format(new_dependency)) log.debug("Removed dependencies in this round:") - for removed_dependency in sorted( - removed, key=lambda req: key_from_req(req.req) - ): + for removed_dependency in sorted(removed, key=key_from_ireq): log.debug(" removing {}".format(removed_dependency)) # Store the last round's results in the their_constraints @@ -339,6 +327,8 @@ def get_best_match(self, ireq): ) ) best_match.comes_from = ireq.comes_from + if hasattr(ireq, "_source_ireqs"): + best_match._source_ireqs = ireq._source_ireqs return best_match def _iter_dependencies(self, ireq): @@ -350,6 +340,17 @@ def _iter_dependencies(self, ireq): Editable requirements will never be looked up, as they may have changed at any time. """ + # Pip does not resolve dependencies of constraints. We skip handling + # constraints here as well to prevent the cache from being polluted. 
+        # Constraints that are later determined to be dependencies will be
+        # marked as non-constraints in later rounds by
+        # `combine_install_requirements`, and will be properly resolved.
+        # See https://github.com/pypa/pip/
+        # blob/6896dfcd831330c13e076a74624d95fa55ff53f4/src/pip/_internal/
+        # legacy_resolve.py#L325
+        if ireq.constraint:
+            return
+
         if ireq.editable or (is_url_requirement(ireq) and not ireq.link.is_wheel):
             for dependency in self.repository.get_dependencies(ireq):
                 yield dependency
diff --git a/pipenv/patched/piptools/scripts/compile.py b/pipenv/patched/piptools/scripts/compile.py
old mode 100644
new mode 100755
index 5ac16e3586..f83b13ef43
--- a/pipenv/patched/piptools/scripts/compile.py
+++ b/pipenv/patched/piptools/scripts/compile.py
@@ -2,33 +2,30 @@
 from __future__ import absolute_import, division, print_function, unicode_literals
 
 import os
+import shlex
 import sys
 import tempfile
 
 from click.utils import safecall
 
+from ._compat import InstallCommand
+from ._compat import install_req_from_line
 from .. import click
-from .._compat import install_req_from_line, parse_requirements
+from .._compat import parse_requirements
+from ..cache import DependencyCache
 from ..exceptions import PipToolsError
+from ..locations import CACHE_DIR
 from ..logging import log
 from ..repositories import LocalRequirementsRepository, PyPIRepository
 from ..resolver import Resolver
-from ..utils import (
-    UNSAFE_PACKAGES,
-    create_install_command,
-    dedup,
-    get_trusted_hosts,
-    is_pinned_requirement,
-    key_from_ireq,
-    key_from_req,
-)
+from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
 from ..writer import OutputWriter
 
 DEFAULT_REQUIREMENTS_FILE = "requirements.in"
 DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"
 
 # Get default values of the pip's options (including options from pipenv.patched.notpip.conf).
-install_command = create_install_command()
+install_command = InstallCommand()
 pip_defaults = install_command.parser.get_default_values()
 
 
@@ -161,7 +158,7 @@
 @click.option(
     "--build-isolation/--no-build-isolation",
     is_flag=True,
-    default=False,
+    default=True,
    help="Enable isolation when building a modern source distribution. 
" "Build dependencies specified by PEP 518 must be already installed " "if build isolation is disabled.", @@ -172,6 +169,16 @@ default=True, help="Add the find-links option to generated file", ) +@click.option( + "--cache-dir", + help="Store the cache data in DIRECTORY.", + default=CACHE_DIR, + envvar="PIP_TOOLS_CACHE_DIR", + show_default=True, + show_envvar=True, + type=click.Path(file_okay=False, writable=True), +) +@click.option("--pip-args", help="Arguments to pass directly to the pip command.") def cli( ctx, verbose, @@ -198,6 +205,8 @@ def cli( max_rounds, build_isolation, emit_find_links, + cache_dir, + pip_args, ): """Compiles requirements.txt from requirements.in specs.""" log.verbosity = verbose - quiet @@ -241,6 +250,7 @@ def cli( # Setup ### + right_args = shlex.split(pip_args or "") pip_args = [] if find_links: for link in find_links: @@ -260,32 +270,42 @@ def cli( for host in trusted_host: pip_args.extend(["--trusted-host", host]) - repository = PyPIRepository(pip_args, build_isolation=build_isolation) + if not build_isolation: + pip_args.append("--no-build-isolation") + pip_args.extend(right_args) + + repository = PyPIRepository(pip_args, cache_dir=cache_dir) # Parse all constraints coming from --upgrade-package/-P upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages) upgrade_install_reqs = { - key_from_req(install_req.req): install_req for install_req in upgrade_reqs_gen + key_from_ireq(install_req): install_req for install_req in upgrade_reqs_gen } + existing_pins_to_upgrade = set() + # Proxy with a LocalRequirementsRepository if --upgrade is not specified # (= default invocation) if not upgrade and os.path.exists(output_file.name): + # Use a temporary repository to ensure outdated(removed) options from + # existing requirements.txt wouldn't get into the current repository. 
+ tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir) ireqs = parse_requirements( output_file.name, - finder=repository.finder, - session=repository.session, - options=repository.options, + finder=tmp_repository.finder, + session=tmp_repository.session, + options=tmp_repository.options, ) # Exclude packages from --upgrade-package/-P from the existing - # constraints - existing_pins = { - key_from_req(ireq.req): ireq - for ireq in ireqs - if is_pinned_requirement(ireq) - and key_from_req(ireq.req) not in upgrade_install_reqs - } + # constraints, and separately gather pins to be upgraded + existing_pins = {} + for ireq in filter(is_pinned_requirement, ireqs): + key = key_from_ireq(ireq) + if key in upgrade_install_reqs: + existing_pins_to_upgrade.add(key) + else: + existing_pins[key] = ireq repository = LocalRequirementsRepository(existing_pins, repository) ### @@ -306,10 +326,14 @@ def cli( dist = run_setup(src_file) tmpfile.write("\n".join(dist.install_requires)) + comes_from = "{name} ({filename})".format( + name=dist.get_name(), filename=src_file + ) else: tmpfile.write(sys.stdin.read()) + comes_from = "-r -" tmpfile.flush() - constraints.extend( + reqs = list( parse_requirements( tmpfile.name, finder=repository.finder, @@ -317,6 +341,9 @@ def cli( options=repository.options, ) ) + for req in reqs: + req.comes_from = comes_from + constraints.extend(reqs) else: constraints.extend( parse_requirements( @@ -331,7 +358,10 @@ def cli( key_from_ireq(ireq) for ireq in constraints if not ireq.constraint } - constraints.extend(upgrade_install_reqs.values()) + allowed_upgrades = primary_packages | existing_pins_to_upgrade + constraints.extend( + ireq for key, ireq in upgrade_install_reqs.items() if key in allowed_upgrades + ) # Filter out pip environment markers which do not match (PEP496) constraints = [ @@ -353,6 +383,7 @@ def cli( constraints, repository, prereleases=repository.finder.allow_all_prereleases or pre, + cache=DependencyCache(cache_dir), clear_caches=rebuild, allow_unsafe=allow_unsafe, ) @@ -371,33 +402,6 @@ def cli( # Output ## - # Compute reverse dependency annotations statically, from the - # dependency cache that the resolver has populated by now. - # - # TODO (1a): reverse deps for any editable package are lost - # what SHOULD happen is that they are cached in memory, just - # not persisted to disk! - # - # TODO (1b): perhaps it's easiest if the dependency cache has an API - # that could take InstallRequirements directly, like: - # - # cache.set(ireq, ...) 
- # - # then, when ireq is editable, it would store in - # - # editables[egg_name][link_without_fragment] = deps - # editables['pip-tools']['git+...ols.git@future'] = { - # 'click>=3.0', 'six' - # } - # - # otherwise: - # - # self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'} - # - reverse_dependencies = None - if annotate: - reverse_dependencies = resolver.reverse_dependencies(results) - writer = OutputWriter( src_files, output_file, @@ -410,7 +414,7 @@ def cli( generate_hashes=generate_hashes, default_index_url=repository.DEFAULT_INDEX_URL, index_urls=repository.finder.index_urls, - trusted_hosts=get_trusted_hosts(repository.finder), + trusted_hosts=repository.finder.trusted_hosts, format_control=repository.finder.format_control, allow_unsafe=allow_unsafe, find_links=repository.finder.find_links, @@ -419,8 +423,6 @@ def cli( writer.write( results=results, unsafe_requirements=resolver.unsafe_constraints, - reverse_dependencies=reverse_dependencies, - primary_packages=primary_packages, markers={ key_from_ireq(ireq): ireq.markers for ireq in constraints if ireq.markers }, diff --git a/pipenv/patched/piptools/scripts/sync.py b/pipenv/patched/piptools/scripts/sync.py old mode 100644 new mode 100755 index 40c086a4f0..4a7b3d5868 --- a/pipenv/patched/piptools/scripts/sync.py +++ b/pipenv/patched/piptools/scripts/sync.py @@ -1,13 +1,18 @@ # coding: utf-8 from __future__ import absolute_import, division, print_function, unicode_literals +import itertools import os +import shlex import sys +from ._compat import get_installed_distributions, InstallCommand + from .. import click, sync -from .._compat import get_installed_distributions, parse_requirements +from .._compat import parse_requirements from ..exceptions import PipToolsError from ..logging import log +from ..repositories import PyPIRepository from ..utils import flat_map DEFAULT_REQUIREMENTS_FILE = "requirements.txt" @@ -68,6 +73,7 @@ "the private key and the certificate in PEM format.", ) @click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1) +@click.option("--pip-args", help="Arguments to pass directly to pip install.") def cli( ask, dry_run, @@ -82,6 +88,7 @@ def cli( cert, client_cert, src_files, + pip_args, ): """Synchronize virtual environment with requirements.txt.""" if not src_files: @@ -104,8 +111,15 @@ def cli( log.error("ERROR: " + msg) sys.exit(2) + install_command = InstallCommand() + options, _ = install_command.parse_args([]) + session = install_command._build_session(options) + finder = install_command._build_package_finder(options=options, session=session) + + # Parse requirements file. Note, all options inside requirements file + # will be collected by the finder. 
requirements = flat_map( - lambda src: parse_requirements(src, session=True), src_files + lambda src: parse_requirements(src, finder=finder, session=session), src_files ) try: @@ -117,26 +131,17 @@ def cli( installed_dists = get_installed_distributions(skip=[], user_only=user_only) to_install, to_uninstall = sync.diff(requirements, installed_dists) - install_flags = [] - for link in find_links or []: - install_flags.extend(["-f", link]) - if no_index: - install_flags.append("--no-index") - if index_url: - install_flags.extend(["-i", index_url]) - if extra_index_url: - for extra_index in extra_index_url: - install_flags.extend(["--extra-index-url", extra_index]) - if trusted_host: - for host in trusted_host: - install_flags.extend(["--trusted-host", host]) - if user_only: - install_flags.append("--user") - if cert: - install_flags.extend(["--cert", cert]) - if client_cert: - install_flags.extend(["--client-cert", client_cert]) - + install_flags = _compose_install_flags( + finder, + no_index=no_index, + index_url=index_url, + extra_index_url=extra_index_url, + trusted_host=trusted_host, + find_links=find_links, + user_only=user_only, + cert=cert, + client_cert=client_cert, + ) + shlex.split(pip_args or "") sys.exit( sync.sync( to_install, @@ -147,3 +152,65 @@ def cli( ask=ask, ) ) + + +def _compose_install_flags( + finder, + no_index=False, + index_url=None, + extra_index_url=None, + trusted_host=None, + find_links=None, + user_only=False, + cert=None, + client_cert=None, +): + """ + Compose install flags with the given finder and CLI options. + """ + result = [] + + # Build --index-url/--extra-index-url/--no-index + if no_index: + result.append("--no-index") + elif index_url: + result.extend(["--index-url", index_url]) + elif finder.index_urls: + finder_index_url = finder.index_urls[0] + if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL: + result.extend(["--index-url", finder_index_url]) + for extra_index in finder.index_urls[1:]: + result.extend(["--extra-index-url", extra_index]) + else: + result.append("--no-index") + + for extra_index in extra_index_url or []: + result.extend(["--extra-index-url", extra_index]) + + # Build --trusted-hosts + for host in itertools.chain(trusted_host or [], finder.trusted_hosts): + result.extend(["--trusted-host", host]) + + # Build --find-links + for link in itertools.chain(find_links or [], finder.find_links): + result.extend(["--find-links", link]) + + # Build format controls --no-binary/--only-binary + for format_control in ("no_binary", "only_binary"): + formats = getattr(finder.format_control, format_control) + if not formats: + continue + result.extend( + ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))] + ) + + if user_only: + result.append("--user") + + if cert: + result.extend(["--cert", cert]) + + if client_cert: + result.extend(["--client-cert", client_cert]) + + return result diff --git a/pipenv/patched/piptools/sync.py b/pipenv/patched/piptools/sync.py index 00b1ae8e34..015ff7a1be 100644 --- a/pipenv/patched/piptools/sync.py +++ b/pipenv/patched/piptools/sync.py @@ -4,8 +4,10 @@ import tempfile from subprocess import check_call # nosec +from ._compat import DEV_PKGS +from ._compat import stdlib_pkgs + from . 
import click -from ._compat import DEV_PKGS, stdlib_pkgs from .exceptions import IncompatibleRequirements from .utils import ( flat_map, @@ -80,18 +82,19 @@ def merge(requirements, ignore_conflicts): # Limitation: URL requirements are merged by precise string match, so # "file:///example.zip#egg=example", "file:///example.zip", and # "example==1.0" will not merge with each other - key = key_from_ireq(ireq) - - if not ignore_conflicts: - existing_ireq = by_key.get(key) - if existing_ireq: - # NOTE: We check equality here since we can assume that the - # requirements are all pinned - if ireq.specifier != existing_ireq.specifier: - raise IncompatibleRequirements(ireq, existing_ireq) - - # TODO: Always pick the largest specifier in case of a conflict - by_key[key] = ireq + if ireq.match_markers(): + key = key_from_ireq(ireq) + + if not ignore_conflicts: + existing_ireq = by_key.get(key) + if existing_ireq: + # NOTE: We check equality here since we can assume that the + # requirements are all pinned + if ireq.specifier != existing_ireq.specifier: + raise IncompatibleRequirements(ireq, existing_ireq) + + # TODO: Always pick the largest specifier in case of a conflict + by_key[key] = ireq return by_key.values() diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py index 6bd01c0bb5..aa93ec88ac 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py @@ -8,13 +8,14 @@ import six from click.utils import LazyFile +from ._compat import install_req_from_line from six.moves import shlex_quote from pipenv.vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier from pipenv.vendor.packaging.version import Version, InvalidVersion, parse as parse_version from pipenv.vendor.packaging.markers import Marker, Op, Value, Variable -from ._compat import PIP_VERSION, InstallCommand, install_req_from_line +from ._compat import PIP_VERSION from .click import style UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"} @@ -25,6 +26,7 @@ "--upgrade", "--upgrade-package", "--verbose", + "--cache-dir", } @@ -240,7 +242,7 @@ def as_tuple(ireq): if not is_pinned_requirement(ireq): raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq)) - name = key_from_req(ireq.req) + name = key_from_ireq(ireq) version = next(iter(ireq.specifier._specs))._spec[1] extras = tuple(sorted(ireq.extras)) return name, version, extras @@ -289,7 +291,7 @@ def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False): ... 'q': ['qux', 'quux'] ... } - The values of the resulting lookup table will be values, not sets. + The values of the resulting lookup table will be lists, not sets. For extra power, you can even change the values while building up the LUT. To do so, use the `keyval` function instead of the `key` arg: @@ -336,7 +338,7 @@ def keyval(v): def dedup(iterable): """Deduplicate an iterable object like iter(set(iterable)) but - order-reserved. + order-preserved. """ return iter(OrderedDict.fromkeys(iterable)) @@ -381,7 +383,10 @@ def get_hashes_from_ireq(ireq): in the requirement options. 
""" result = [] - ireq_hashes = ireq.options.get("hashes", {}) + if PIP_VERSION[:2] <= (20, 0): + ireq_hashes = ireq.options.get("hashes", {}) + else: + ireq_hashes = ireq.hash_options for algorithm, hexdigests in ireq_hashes.items(): for hash_ in hexdigests: result.append("{}:{}".format(algorithm, hash_)) @@ -427,6 +432,10 @@ def get_compile_command(click_ctx): # Collect variadic args separately, they will be added # at the end of the command later if option.nargs < 0: + # These will necessarily be src_files + # Re-add click-stripped '--' if any start with '-' + if any(val.startswith("-") and val != "-" for val in value): + right_args.append("--") right_args.extend([shlex_quote(force_text(val)) for val in value]) continue @@ -465,32 +474,20 @@ def get_compile_command(click_ctx): left_args.append(shlex_quote(arg)) # Append to args the option with a value else: - left_args.append( - "{option}={value}".format( - option=option_long_name, value=shlex_quote(force_text(val)) + if option.name == "pip_args": + # shlex_quote would produce functional but noisily quoted results, + # e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"'' + # Instead, we try to get more legible quoting via repr: + left_args.append( + "{option}={value}".format( + option=option_long_name, value=repr(fs_str(force_text(val))) + ) + ) + else: + left_args.append( + "{option}={value}".format( + option=option_long_name, value=shlex_quote(force_text(val)) + ) ) - ) return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args)) - - -def create_install_command(): - """ - Return an instance of InstallCommand. - """ - if PIP_VERSION < (19, 3): - return InstallCommand() - - from pipenv.patched.notpip._internal.commands import create_command - - return create_command("install") - - -def get_trusted_hosts(finder): - """ - Returns an iterable of trusted hosts from a given finder. 
- """ - if PIP_VERSION < (19, 2): - return (host for _, host, _ in finder.secure_origins) - - return finder.trusted_hosts diff --git a/pipenv/patched/piptools/writer.py b/pipenv/patched/piptools/writer.py index 47cfbbc4a9..694fd3cc12 100644 --- a/pipenv/patched/piptools/writer.py +++ b/pipenv/patched/piptools/writer.py @@ -1,8 +1,11 @@ from __future__ import unicode_literals import os +import re from itertools import chain +import six + from .click import unstyle from .logging import log from .utils import ( @@ -36,6 +39,15 @@ ) +strip_comes_from_line_re = re.compile(r" \(line \d+\)$") + + +def _comes_from_as_string(ireq): + if isinstance(ireq.comes_from, six.string_types): + return strip_comes_from_line_re.sub("", ireq.comes_from) + return key_from_ireq(ireq.comes_from) + + class OutputWriter(object): def __init__( self, @@ -102,9 +114,9 @@ def write_trusted_hosts(self): yield "--trusted-host {}".format(trusted_host) def write_format_controls(self): - for nb in dedup(self.format_control.no_binary): + for nb in dedup(sorted(self.format_control.no_binary)): yield "--no-binary {}".format(nb) - for ob in dedup(self.format_control.only_binary): + for ob in dedup(sorted(self.format_control.only_binary)): yield "--only-binary {}".format(ob) def write_find_links(self): @@ -125,19 +137,9 @@ def write_flags(self): if emitted: yield "" - def _iter_lines( - self, - results, - unsafe_requirements=None, - reverse_dependencies=None, - primary_packages=None, - markers=None, - hashes=None, - ): + def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=None): # default values unsafe_requirements = unsafe_requirements or [] - reverse_dependencies = reverse_dependencies or {} - primary_packages = primary_packages or [] markers = markers or {} hashes = hashes or {} @@ -169,11 +171,7 @@ def _iter_lines( yield MESSAGE_UNHASHED_PACKAGE warn_uninstallable = True line = self._format_requirement( - ireq, - reverse_dependencies, - primary_packages, - markers.get(key_from_ireq(ireq)), - hashes=hashes, + ireq, markers.get(key_from_ireq(ireq)), hashes=hashes ) yield line yielded = True @@ -194,11 +192,7 @@ def _iter_lines( yield comment("# {}".format(ireq_key)) else: line = self._format_requirement( - ireq, - reverse_dependencies, - primary_packages, - marker=markers.get(ireq_key), - hashes=hashes, + ireq, marker=markers.get(ireq_key), hashes=hashes ) yield line @@ -209,41 +203,32 @@ def _iter_lines( if warn_uninstallable: log.warning(MESSAGE_UNINSTALLABLE) - def write( - self, - results, - unsafe_requirements, - reverse_dependencies, - primary_packages, - markers, - hashes, - ): + def write(self, results, unsafe_requirements, markers, hashes): - for line in self._iter_lines( - results, - unsafe_requirements, - reverse_dependencies, - primary_packages, - markers, - hashes, - ): + for line in self._iter_lines(results, unsafe_requirements, markers, hashes): log.info(line) if not self.dry_run: self.dst_file.write(unstyle(line).encode("utf-8")) self.dst_file.write(os.linesep.encode("utf-8")) - def _format_requirement( - self, ireq, reverse_dependencies, primary_packages, marker=None, hashes=None - ): + def _format_requirement(self, ireq, marker=None, hashes=None): ireq_hashes = (hashes if hashes is not None else {}).get(ireq) line = format_requirement(ireq, marker=marker, hashes=ireq_hashes) - if not self.annotate or key_from_ireq(ireq) in primary_packages: + if not self.annotate: return line - # Annotate what packages this package is required by - required_by = 
reverse_dependencies.get(ireq.name.lower(), []) + # Annotate what packages or reqs-ins this package is required by + required_by = set() + if hasattr(ireq, "_source_ireqs"): + required_by |= { + _comes_from_as_string(src_ireq) + for src_ireq in ireq._source_ireqs + if src_ireq.comes_from + } + elif ireq.comes_from: + required_by.add(_comes_from_as_string(ireq)) if required_by: annotation = ", ".join(sorted(required_by)) line = "{:24}{}{}".format( From d8f0100c084b1949d0d6b5fd1fd00f6e333defcd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 15:28:30 -0400 Subject: [PATCH 02/12] Update patches for pip and piptools Signed-off-by: Dan Ryan --- .../patched/_post-pip-update-pep425tags.patch | 50 -- .../patched/{pip19.patch => pip20.patch} | 326 ++++++----- .../vendoring/patches/patched/piptools.patch | 528 ++++++++++-------- 3 files changed, 447 insertions(+), 457 deletions(-) delete mode 100644 tasks/vendoring/patches/patched/_post-pip-update-pep425tags.patch rename tasks/vendoring/patches/patched/{pip19.patch => pip20.patch} (86%) diff --git a/tasks/vendoring/patches/patched/_post-pip-update-pep425tags.patch b/tasks/vendoring/patches/patched/_post-pip-update-pep425tags.patch deleted file mode 100644 index b552a7b06e..0000000000 --- a/tasks/vendoring/patches/patched/_post-pip-update-pep425tags.patch +++ /dev/null @@ -1,50 +0,0 @@ -diff --git a/pipenv/patched/notpip/_internal/pep425tags.py b/pipenv/patched/notpip/_internal/pep425tags.py -index 369275a8..16d041d9 100644 ---- a/pipenv/patched/notpip/_internal/pep425tags.py -+++ b/pipenv/patched/notpip/_internal/pep425tags.py -@@ -3,6 +3,7 @@ from __future__ import absolute_import - - import distutils.util - import logging -+import os - import platform - import re - import sys -@@ -170,8 +171,9 @@ def is_linux_armhf(): - return False - # hard-float ABI can be detected from the ELF header of the running - # process -+ sys_executable = os.environ.get('PIP_PYTHON_PATH', sys.executable) - try: -- with open(sys.executable, 'rb') as f: -+ with open(sys_executable, 'rb') as f: - elf_header_raw = f.read(40) # read 40 first bytes of ELF header - except (IOError, OSError, TypeError): - return False -@@ -205,7 +207,7 @@ def is_manylinux1_compatible(): - pass - - # Check glibc version. CentOS 5 uses glibc 2.5. -- return pip._internal.utils.glibc.have_compatible_glibc(2, 5) -+ return pipenv.patched.notpip._internal.utils.glibc.have_compatible_glibc(2, 5) - - - def is_manylinux2010_compatible(): -@@ -223,7 +225,7 @@ def is_manylinux2010_compatible(): - pass - - # Check glibc version. CentOS 6 uses glibc 2.12. -- return pip._internal.utils.glibc.have_compatible_glibc(2, 12) -+ return pipenv.patched.notpip._internal.utils.glibc.have_compatible_glibc(2, 12) - - - def is_manylinux2014_compatible(): -@@ -249,7 +251,7 @@ def is_manylinux2014_compatible(): - pass - - # Check glibc version. CentOS 7 uses glibc 2.17. 
-- return pip._internal.utils.glibc.have_compatible_glibc(2, 17) -+ return pipenv.patched.notpip._internal.utils.glibc.have_compatible_glibc(2, 17) - - - def get_darwin_arches(major, minor, machine): diff --git a/tasks/vendoring/patches/patched/pip19.patch b/tasks/vendoring/patches/patched/pip20.patch similarity index 86% rename from tasks/vendoring/patches/patched/pip19.patch rename to tasks/vendoring/patches/patched/pip20.patch index fa0a7dea39..940647f0f3 100644 --- a/tasks/vendoring/patches/patched/pip19.patch +++ b/tasks/vendoring/patches/patched/pip20.patch @@ -1,5 +1,5 @@ diff --git a/pipenv/patched/pip/_internal/build_env.py b/pipenv/patched/pip/_internal/build_env.py -index 5e6dc460..0412f635 100644 +index f55f0e6b..dc7ece39 100644 --- a/pipenv/patched/pip/_internal/build_env.py +++ b/pipenv/patched/pip/_internal/build_env.py @@ -169,8 +169,9 @@ class BuildEnvironment(object): @@ -13,11 +13,98 @@ index 5e6dc460..0412f635 100644 '--ignore-installed', '--no-user', '--prefix', prefix.path, '--no-warn-script-location', ] # type: List[str] +diff --git a/pipenv/patched/pip/_internal/commands/__init__.py b/pipenv/patched/pip/_internal/commands/__init__.py +index 2a311f8f..9f2f1aaa 100644 +--- a/pipenv/patched/pip/_internal/commands/__init__.py ++++ b/pipenv/patched/pip/_internal/commands/__init__.py +@@ -21,7 +21,7 @@ CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') + + # The ordering matters for help display. + # Also, even though the module path starts with the same +-# "pip._internal.commands" prefix in each case, we include the full path ++# "pipenv.patched.notpip._internal.commands" prefix in each case, we include the full path + # because it makes testing easier (specifically when modifying commands_dict + # in test setup / teardown by adding info for a FakeCommand class defined + # in a test-related module). +@@ -29,59 +29,59 @@ CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') + # so that the ordering won't be lost when using Python 2.7. 
+ commands_dict = OrderedDict([ + ('install', CommandInfo( +- 'pip._internal.commands.install', 'InstallCommand', ++ 'pipenv.patched.notpip._internal.commands.install', 'InstallCommand', + 'Install packages.', + )), + ('download', CommandInfo( +- 'pip._internal.commands.download', 'DownloadCommand', ++ 'pipenv.patched.notpip._internal.commands.download', 'DownloadCommand', + 'Download packages.', + )), + ('uninstall', CommandInfo( +- 'pip._internal.commands.uninstall', 'UninstallCommand', ++ 'pipenv.patched.notpip._internal.commands.uninstall', 'UninstallCommand', + 'Uninstall packages.', + )), + ('freeze', CommandInfo( +- 'pip._internal.commands.freeze', 'FreezeCommand', ++ 'pipenv.patched.notpip._internal.commands.freeze', 'FreezeCommand', + 'Output installed packages in requirements format.', + )), + ('list', CommandInfo( +- 'pip._internal.commands.list', 'ListCommand', ++ 'pipenv.patched.notpip._internal.commands.list', 'ListCommand', + 'List installed packages.', + )), + ('show', CommandInfo( +- 'pip._internal.commands.show', 'ShowCommand', ++ 'pipenv.patched.notpip._internal.commands.show', 'ShowCommand', + 'Show information about installed packages.', + )), + ('check', CommandInfo( +- 'pip._internal.commands.check', 'CheckCommand', ++ 'pipenv.patched.notpip._internal.commands.check', 'CheckCommand', + 'Verify installed packages have compatible dependencies.', + )), + ('config', CommandInfo( +- 'pip._internal.commands.configuration', 'ConfigurationCommand', ++ 'pipenv.patched.notpip._internal.commands.configuration', 'ConfigurationCommand', + 'Manage local and global configuration.', + )), + ('search', CommandInfo( +- 'pip._internal.commands.search', 'SearchCommand', ++ 'pipenv.patched.notpip._internal.commands.search', 'SearchCommand', + 'Search PyPI for packages.', + )), + ('wheel', CommandInfo( +- 'pip._internal.commands.wheel', 'WheelCommand', ++ 'pipenv.patched.notpip._internal.commands.wheel', 'WheelCommand', + 'Build wheels from your requirements.', + )), + ('hash', CommandInfo( +- 'pip._internal.commands.hash', 'HashCommand', ++ 'pipenv.patched.notpip._internal.commands.hash', 'HashCommand', + 'Compute hashes of package archives.', + )), + ('completion', CommandInfo( +- 'pip._internal.commands.completion', 'CompletionCommand', ++ 'pipenv.patched.notpip._internal.commands.completion', 'CompletionCommand', + 'A helper command used for command completion.', + )), + ('debug', CommandInfo( +- 'pip._internal.commands.debug', 'DebugCommand', ++ 'pipenv.patched.notpip._internal.commands.debug', 'DebugCommand', + 'Show information useful for debugging.', + )), + ('help', CommandInfo( +- 'pip._internal.commands.help', 'HelpCommand', ++ 'pipenv.patched.notpip._internal.commands.help', 'HelpCommand', + 'Show help for commands.', + )), + ]) # type: OrderedDict[str, CommandInfo] diff --git a/pipenv/patched/pip/_internal/commands/install.py b/pipenv/patched/pip/_internal/commands/install.py -index 5842d18d..4e56d0bb 100644 +index 02a187c8..f917e645 100644 --- a/pipenv/patched/pip/_internal/commands/install.py +++ b/pipenv/patched/pip/_internal/commands/install.py -@@ -388,7 +388,7 @@ class InstallCommand(RequirementCommand): +@@ -337,7 +337,7 @@ class InstallCommand(RequirementCommand): else: # If we're not replacing an already installed pip, # we're not modifying it. 
@@ -26,11 +113,11 @@ index 5842d18d..4e56d0bb 100644 protect_pip_from_modification_on_windows( modifying_pip=modifying_pip ) -diff --git a/pipenv/patched/pip/_internal/index.py b/pipenv/patched/pip/_internal/index.py -index 897444aa..4c61043c 100644 ---- a/pipenv/patched/pip/_internal/index.py -+++ b/pipenv/patched/pip/_internal/index.py -@@ -119,6 +119,7 @@ class LinkEvaluator(object): +diff --git a/pipenv/patched/pip/_internal/index/package_finder.py b/pipenv/patched/pip/_internal/index/package_finder.py +index a74d78db..11128f4d 100644 +--- a/pipenv/patched/pip/_internal/index/package_finder.py ++++ b/pipenv/patched/pip/_internal/index/package_finder.py +@@ -121,6 +121,7 @@ class LinkEvaluator(object): target_python, # type: TargetPython allow_yanked, # type: bool ignore_requires_python=None, # type: Optional[bool] @@ -38,7 +125,7 @@ index 897444aa..4c61043c 100644 ): # type: (...) -> None """ -@@ -137,15 +138,20 @@ class LinkEvaluator(object): +@@ -139,15 +140,20 @@ class LinkEvaluator(object): :param ignore_requires_python: Whether to ignore incompatible PEP 503 "data-requires-python" values in HTML links. Defaults to False. @@ -59,7 +146,7 @@ index 897444aa..4c61043c 100644 self.project_name = project_name -@@ -176,10 +182,10 @@ class LinkEvaluator(object): +@@ -178,10 +184,10 @@ class LinkEvaluator(object): return (False, 'not a file') if ext not in SUPPORTED_EXTENSIONS: return (False, 'unsupported archive format: %s' % ext) @@ -72,7 +159,7 @@ index 897444aa..4c61043c 100644 return (False, 'macosx10 one') if ext == WHEEL_EXTENSION: try: -@@ -191,7 +197,7 @@ class LinkEvaluator(object): +@@ -193,7 +199,7 @@ class LinkEvaluator(object): return (False, reason) supported_tags = self._target_python.get_tags() @@ -81,7 +168,7 @@ index 897444aa..4c61043c 100644 # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. file_tags = wheel.get_formatted_file_tags() -@@ -228,7 +234,7 @@ class LinkEvaluator(object): +@@ -230,7 +236,7 @@ class LinkEvaluator(object): link, version_info=self._target_python.py_version_info, ignore_requires_python=self._ignore_requires_python, ) @@ -90,9 +177,9 @@ index 897444aa..4c61043c 100644 # Return None for the reason text to suppress calling # _log_skipped_link(). return (False, None) -@@ -479,8 +485,8 @@ class CandidateEvaluator(object): - project_name=self._project_name, - ) +@@ -483,8 +489,8 @@ class CandidateEvaluator(object): + + return sorted(filtered_applicable_candidates, key=self._sort_key) - def _sort_key(self, candidate): - # type: (InstallationCandidate) -> CandidateSortingKey @@ -101,7 +188,7 @@ index 897444aa..4c61043c 100644 """ Function to pass as the `key` argument to a call to sorted() to sort InstallationCandidates by preference. -@@ -518,14 +524,18 @@ class CandidateEvaluator(object): +@@ -522,14 +528,18 @@ class CandidateEvaluator(object): if link.is_wheel: # can raise InvalidWheelFilename wheel = Wheel(link.filename) @@ -122,7 +209,7 @@ index 897444aa..4c61043c 100644 if wheel.build_tag is not None: match = re.match(r'^(\d+)(.*)$', wheel.build_tag) build_tag_groups = match.groups() -@@ -603,6 +613,7 @@ class PackageFinder(object): +@@ -607,6 +617,7 @@ class PackageFinder(object): format_control=None, # type: Optional[FormatControl] candidate_prefs=None, # type: CandidatePreferences ignore_requires_python=None, # type: Optional[bool] @@ -130,7 +217,7 @@ index 897444aa..4c61043c 100644 ): # type: (...) 
-> None """ -@@ -617,6 +628,8 @@ class PackageFinder(object): +@@ -621,6 +632,8 @@ class PackageFinder(object): """ if candidate_prefs is None: candidate_prefs = CandidatePreferences() @@ -139,7 +226,7 @@ index 897444aa..4c61043c 100644 format_control = format_control or FormatControl(set(), set()) -@@ -625,12 +638,16 @@ class PackageFinder(object): +@@ -629,12 +642,16 @@ class PackageFinder(object): self._ignore_requires_python = ignore_requires_python self._link_collector = link_collector self._target_python = target_python @@ -156,7 +243,7 @@ index 897444aa..4c61043c 100644 # Don't include an allow_yanked default value to make sure each call # site considers whether yanked releases are allowed. This also causes # that decision to be made explicit in the calling code, which helps -@@ -668,6 +685,23 @@ class PackageFinder(object): +@@ -672,6 +689,23 @@ class PackageFinder(object): ignore_requires_python=selection_prefs.ignore_requires_python, ) @@ -180,7 +267,7 @@ index 897444aa..4c61043c 100644 @property def search_scope(self): # type: () -> SearchScope -@@ -715,6 +749,7 @@ class PackageFinder(object): +@@ -719,6 +753,7 @@ class PackageFinder(object): target_python=self._target_python, allow_yanked=self._allow_yanked, ignore_requires_python=self._ignore_requires_python, @@ -188,7 +275,7 @@ index 897444aa..4c61043c 100644 ) def _sort_links(self, links): -@@ -763,6 +798,7 @@ class PackageFinder(object): +@@ -767,6 +802,7 @@ class PackageFinder(object): # Convert the Text result to str since InstallationCandidate # accepts str. version=str(result), @@ -197,10 +284,10 @@ index 897444aa..4c61043c 100644 def evaluate_links(self, link_evaluator, links): diff --git a/pipenv/patched/pip/_internal/legacy_resolve.py b/pipenv/patched/pip/_internal/legacy_resolve.py -index c24158f4..37c3197f 100644 +index ca269121..999922b8 100644 --- a/pipenv/patched/pip/_internal/legacy_resolve.py +++ b/pipenv/patched/pip/_internal/legacy_resolve.py -@@ -126,6 +126,7 @@ class Resolver(object): +@@ -121,6 +121,7 @@ class Resolver(object): force_reinstall, # type: bool upgrade_strategy, # type: str py_version_info=None, # type: Optional[Tuple[int, ...]] @@ -208,7 +295,7 @@ index c24158f4..37c3197f 100644 ): # type: (...) -> None super(Resolver, self).__init__() -@@ -152,6 +153,10 @@ class Resolver(object): +@@ -143,6 +144,10 @@ class Resolver(object): self.ignore_requires_python = ignore_requires_python self.use_user_site = use_user_site self._make_install_req = make_install_req @@ -218,18 +305,16 @@ index c24158f4..37c3197f 100644 + self.ignore_requires_python = True self._discovered_dependencies = \ - defaultdict(list) # type: DefaultDict[str, List] -@@ -344,7 +349,8 @@ class Resolver(object): - def _resolve_one( + defaultdict(list) # type: DiscoveredDependencies +@@ -313,6 +318,7 @@ class Resolver(object): self, requirement_set, # type: RequirementSet -- req_to_install # type: InstallRequirement -+ req_to_install, # type: InstallRequirement + req_to_install, # type: InstallRequirement + ignore_requires_python=False, # type: bool ): # type: (...) -> List[InstallRequirement] """Prepare a single requirements file. -@@ -368,11 +374,21 @@ class Resolver(object): +@@ -336,11 +342,21 @@ class Resolver(object): dist = abstract_dist.get_pkg_resources_distribution() # This will raise UnsupportedPythonVersion if the given Python # version isn't compatible with the distribution's Requires-Python. 
@@ -253,14 +338,15 @@ index c24158f4..37c3197f 100644 more_reqs = [] # type: List[InstallRequirement] def add_req(subreq, extras_requested): -@@ -397,9 +413,13 @@ class Resolver(object): - # can refer to it when adding dependencies. - if not requirement_set.has_requirement(req_to_install.name): +@@ -367,9 +383,14 @@ class Resolver(object): # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. ++ req_to_install.is_direct = True + assert req_to_install.is_direct + available_requested = sorted( + set(dist.extras) & set(req_to_install.extras) + ) - req_to_install.is_direct = True requirement_set.add_requirement( req_to_install, parent_req_name=None, + extras_requested=available_requested, @@ -268,60 +354,60 @@ index c24158f4..37c3197f 100644 if not self.ignore_dependencies: diff --git a/pipenv/patched/pip/_internal/models/candidate.py b/pipenv/patched/pip/_internal/models/candidate.py -index 4d49604d..cdfe65aa 100644 +index 1dc1a576..4cc06026 100644 --- a/pipenv/patched/pip/_internal/models/candidate.py +++ b/pipenv/patched/pip/_internal/models/candidate.py -@@ -16,11 +16,12 @@ class InstallationCandidate(KeyBasedCompareMixin): +@@ -12,11 +12,12 @@ class InstallationCandidate(KeyBasedCompareMixin): """Represents a potential "candidate" for installation. """ -- def __init__(self, project, version, link): -- # type: (Any, str, Link) -> None -+ def __init__(self, project, version, link, requires_python=None): -+ # type: (Any, str, Link, Any) -> None - self.project = project +- def __init__(self, name, version, link): +- # type: (str, str, Link) -> None ++ def __init__(self, name, version, link, requies_python=None): ++ # type: (str, str, Link, Any) -> None + self.name = name self.version = parse_version(version) # type: _BaseVersion self.link = link + self.requires_python = requires_python super(InstallationCandidate, self).__init__( - key=(self.project, self.version, self.link), + key=(self.name, self.version, self.link), diff --git a/pipenv/patched/pip/_internal/operations/prepare.py b/pipenv/patched/pip/_internal/operations/prepare.py -index d0930458..91527ae8 100644 +index 0b61f205..ddfb75a6 100644 --- a/pipenv/patched/pip/_internal/operations/prepare.py +++ b/pipenv/patched/pip/_internal/operations/prepare.py -@@ -140,14 +140,7 @@ class RequirementPreparer(object): +@@ -429,14 +429,7 @@ class RequirementPreparer(object): # FIXME: this won't upgrade when there's an existing # package unpacked in `req.source_dir` if os.path.exists(os.path.join(req.source_dir, 'setup.py')): - raise PreviousBuildDirError( -- "pip can't proceed with requirements '%s' due to a" -- " pre-existing build directory (%s). This is " +- "pip can't proceed with requirements '{}' due to a" +- " pre-existing build directory ({}). This is " - "likely due to a previous installation that failed" - ". pip is being responsible and not assuming it " - "can delete this. Please delete it and try again." 
-- % (req, req.source_dir) +- .format(req, req.source_dir) - ) + rmtree(req.source_dir) # Now that we have the real link, we can tell what kind of # requirements we have and raise some more informative errors diff --git a/pipenv/patched/pip/_internal/req/req_set.py b/pipenv/patched/pip/_internal/req/req_set.py -index b34a2bb1..afcd2e4f 100644 +index 087ac592..01e7394a 100644 --- a/pipenv/patched/pip/_internal/req/req_set.py +++ b/pipenv/patched/pip/_internal/req/req_set.py @@ -24,8 +24,8 @@ logger = logging.getLogger(__name__) class RequirementSet(object): -- def __init__(self, require_hashes=False, check_supported_wheels=True): -- # type: (bool, bool) -> None -+ def __init__(self, require_hashes=False, check_supported_wheels=True, ignore_compatibility=True): -+ # type: (bool) -> None +- def __init__(self, check_supported_wheels=True): +- # type: (bool) -> None ++ def __init__(self, check_supported_wheels=True, ignore_compatibility=True): ++ # type: (bool, bool) -> None """Create a RequirementSet. """ -@@ -36,6 +36,9 @@ class RequirementSet(object): +@@ -35,6 +35,9 @@ class RequirementSet(object): self.unnamed_requirements = [] # type: List[InstallRequirement] self.successfully_downloaded = [] # type: List[InstallRequirement] self.reqs_to_cleanup = [] # type: List[InstallRequirement] @@ -331,7 +417,7 @@ index b34a2bb1..afcd2e4f 100644 def __str__(self): # type: () -> str -@@ -199,7 +202,7 @@ class RequirementSet(object): +@@ -198,7 +201,7 @@ class RequirementSet(object): if project_name in self.requirements: return self.requirements[project_name] @@ -374,10 +460,10 @@ index 7767111a..52738e16 100644 }, }, diff --git a/pipenv/patched/pip/_internal/utils/misc.py b/pipenv/patched/pip/_internal/utils/misc.py -index b8482635..2fae4e08 100644 +index 4a581601..ef0c127f 100644 --- a/pipenv/patched/pip/_internal/utils/misc.py +++ b/pipenv/patched/pip/_internal/utils/misc.py -@@ -136,8 +136,8 @@ def get_prog(): +@@ -132,8 +132,8 @@ def get_prog(): @retry(stop_max_delay=3000, wait_fixed=500) def rmtree(dir, ignore_errors=False): # type: (str, bool) -> None @@ -410,7 +496,7 @@ index 68aa86ed..8577d387 100644 diff --git a/pipenv/patched/pip/_internal/utils/setuptools_build.py b/pipenv/patched/pip/_internal/utils/setuptools_build.py -index 12d866e0..28649a4d 100644 +index 4147a650..f05fb11c 100644 --- a/pipenv/patched/pip/_internal/utils/setuptools_build.py +++ b/pipenv/patched/pip/_internal/utils/setuptools_build.py @@ -1,3 +1,4 @@ @@ -426,16 +512,16 @@ index 12d866e0..28649a4d 100644 + sys_executable = os.environ.get('PIP_PYTHON_PATH', sys.executable) + args = [sys_executable] if unbuffered_output: - args.append('-u') - args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)]) + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] diff --git a/pipenv/patched/pip/_internal/utils/temp_dir.py b/pipenv/patched/pip/_internal/utils/temp_dir.py -index 77d40be6..8a32cf2d 100644 +index 65e41bc7..9eabf28e 100644 --- a/pipenv/patched/pip/_internal/utils/temp_dir.py +++ b/pipenv/patched/pip/_internal/utils/temp_dir.py -@@ -8,9 +8,11 @@ import itertools - import logging - import os.path - import tempfile +@@ -8,9 +8,11 @@ import tempfile + from contextlib import contextmanager + + from pip._vendor.contextlib2 import ExitStack +import warnings from pip._internal.utils.misc import rmtree @@ -443,15 +529,19 @@ index 77d40be6..8a32cf2d 100644 +from pipenv.vendor.vistir.compat import finalize, ResourceWarning if MYPY_CHECK_RUNNING: - from typing import Optional -@@ -60,6 +62,20 @@ class 
TempDirectory(object): + from typing import Any, Dict, Iterator, Optional, TypeVar +@@ -119,11 +121,25 @@ class TempDirectory(object): self._deleted = False self.delete = delete self.kind = kind + self._finalizer = None + if self._path: + self._register_finalizer() -+ + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + def _register_finalizer(self): + if self.delete and self._path: + self._finalizer = finalize( @@ -462,10 +552,11 @@ index 77d40be6..8a32cf2d 100644 + ) + else: + self._finalizer = None - ++ @property def path(self): -@@ -92,12 +108,28 @@ class TempDirectory(object): + # type: () -> str +@@ -166,13 +182,29 @@ class TempDirectory(object): logger.debug("Created temporary directory: {}".format(path)) return path @@ -482,6 +573,7 @@ index 77d40be6..8a32cf2d 100644 + warnings.warn(warn_message, ResourceWarning) + def cleanup(self): + # type: () -> None """Remove the temporary directory created and reset state """ - self._deleted = True @@ -497,97 +589,3 @@ index 77d40be6..8a32cf2d 100644 class AdjacentTempDirectory(TempDirectory): -@@ -169,4 +201,4 @@ class AdjacentTempDirectory(TempDirectory): - ) - - logger.debug("Created temporary directory: {}".format(path)) -- return path -+ return path -\ No newline at end of file -diff --git a/pipenv/patched/pip/_internal/commands/__init__.py b/pipenv/patched/pip/_internal/commands/__init__.py -index abcafa55..ca155a94 100644 ---- a/pipenv/patched/pip/_internal/commands/__init__.py -+++ b/pipenv/patched/pip/_internal/commands/__init__.py -@@ -21,7 +21,7 @@ CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') - - # The ordering matters for help display. - # Also, even though the module path starts with the same --# "pip._internal.commands" prefix in each case, we include the full path -+# "pipenv.patched.notpip._internal.commands" prefix in each case, we include the full path - # because it makes testing easier (specifically when modifying commands_dict - # in test setup / teardown by adding info for a FakeCommand class defined - # in a test-related module). -@@ -29,59 +29,59 @@ CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') - # so that the ordering won't be lost when using Python 2.7. 
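A note on the TempDirectory changes above: the patch swaps unconditional deletion for a finalizer, where vistir.compat.finalize provides weakref.finalize (with a backport for Python 2), so a directory that is never cleaned up explicitly is still removed when the object is garbage collected, and a ResourceWarning flags the leak. A rough sketch of the same pattern using only the standard library (names here are illustrative, not the patched implementation):

    import shutil
    import warnings
    import weakref

    class TempDirSketch(object):
        def __init__(self, path, delete=True):
            self._path = path
            self.delete = delete
            self._finalizer = None
            if delete and path:
                # Fires at garbage collection or interpreter shutdown
                # if cleanup() was never called explicitly.
                self._finalizer = weakref.finalize(
                    self, self._cleanup, path,
                    warn_message="Implicitly cleaning up {!r}".format(path),
                )

        @classmethod
        def _cleanup(cls, name, warn_message=None):
            try:
                shutil.rmtree(name)
            except OSError:
                pass
            else:
                if warn_message:
                    warnings.warn(warn_message, ResourceWarning)

        def cleanup(self):
            # Detaching first makes an explicit cleanup() and the
            # finalizer mutually exclusive, so the directory is only
            # removed once.
            if self._finalizer is not None and self._finalizer.detach():
                shutil.rmtree(self._path)
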
- commands_dict = OrderedDict([ - ('install', CommandInfo( -- 'pip._internal.commands.install', 'InstallCommand', -+ 'pipenv.patched.notpip._internal.commands.install', 'InstallCommand', - 'Install packages.', - )), - ('download', CommandInfo( -- 'pip._internal.commands.download', 'DownloadCommand', -+ 'pipenv.patched.notpip._internal.commands.download', 'DownloadCommand', - 'Download packages.', - )), - ('uninstall', CommandInfo( -- 'pip._internal.commands.uninstall', 'UninstallCommand', -+ 'pipenv.patched.notpip._internal.commands.uninstall', 'UninstallCommand', - 'Uninstall packages.', - )), - ('freeze', CommandInfo( -- 'pip._internal.commands.freeze', 'FreezeCommand', -+ 'pipenv.patched.notpip._internal.commands.freeze', 'FreezeCommand', - 'Output installed packages in requirements format.', - )), - ('list', CommandInfo( -- 'pip._internal.commands.list', 'ListCommand', -+ 'pipenv.patched.notpip._internal.commands.list', 'ListCommand', - 'List installed packages.', - )), - ('show', CommandInfo( -- 'pip._internal.commands.show', 'ShowCommand', -+ 'pipenv.patched.notpip._internal.commands.show', 'ShowCommand', - 'Show information about installed packages.', - )), - ('check', CommandInfo( -- 'pip._internal.commands.check', 'CheckCommand', -+ 'pipenv.patched.notpip._internal.commands.check', 'CheckCommand', - 'Verify installed packages have compatible dependencies.', - )), - ('config', CommandInfo( -- 'pip._internal.commands.configuration', 'ConfigurationCommand', -+ 'pipenv.patched.notpip._internal.commands.configuration', 'ConfigurationCommand', - 'Manage local and global configuration.', - )), - ('search', CommandInfo( -- 'pip._internal.commands.search', 'SearchCommand', -+ 'pipenv.patched.notpip._internal.commands.search', 'SearchCommand', - 'Search PyPI for packages.', - )), - ('wheel', CommandInfo( -- 'pip._internal.commands.wheel', 'WheelCommand', -+ 'pipenv.patched.notpip._internal.commands.wheel', 'WheelCommand', - 'Build wheels from your requirements.', - )), - ('hash', CommandInfo( -- 'pip._internal.commands.hash', 'HashCommand', -+ 'pipenv.patched.notpip._internal.commands.hash', 'HashCommand', - 'Compute hashes of package archives.', - )), - ('completion', CommandInfo( -- 'pip._internal.commands.completion', 'CompletionCommand', -+ 'pipenv.patched.notpip._internal.commands.completion', 'CompletionCommand', - 'A helper command used for command completion.', - )), - ('debug', CommandInfo( -- 'pip._internal.commands.debug', 'DebugCommand', -+ 'pipenv.patched.notpip._internal.commands.debug', 'DebugCommand', - 'Show information useful for debugging.', - )), - ('help', CommandInfo( -- 'pip._internal.commands.help', 'HelpCommand', -+ 'pipenv.patched.notpip._internal.commands.help', 'HelpCommand', - 'Show help for commands.', - )), - ]) # type: OrderedDict[str, CommandInfo] diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 45eb963b0d..65378168cc 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -1,101 +1,100 @@ diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py -index eccbf36..fd8ecdd 100644 +index fda80d5..4f7efbf 100644 --- a/pipenv/patched/piptools/_compat/__init__.py +++ b/pipenv/patched/piptools/_compat/__init__.py -@@ -11,6 +11,7 @@ from .pip_compat import ( - FormatControl, - InstallationCandidate, - InstallCommand, +@@ -4,7 +4,37 @@ from __future__ import absolute_import, division, print_function, 
unicode_litera + + import six + +-from .pip_compat import PIP_VERSION, parse_requirements ++from .pip_compat import ( ++ DEV_PKGS, ++ FAVORITE_HASH, ++ PIP_VERSION, ++ FormatControl, ++ InstallationCandidate, ++ InstallCommand, + InstallationError, - InstallRequirement, - Link, - PackageFinder, -@@ -18,6 +19,8 @@ from .pip_compat import ( - RequirementSet, - RequirementTracker, - Resolver, ++ InstallRequirement, ++ Link, ++ PackageFinder, ++ PyPI, ++ RequirementSet, ++ RequirementTracker, ++ Resolver, + SafeFileCache, + VcsSupport, - Wheel, - WheelCache, - cmdoptions, -@@ -29,6 +32,7 @@ from .pip_compat import ( - is_vcs_url, - parse_requirements, - path_to_url, ++ Wheel, ++ WheelCache, ++ cmdoptions, ++ get_installed_distributions, ++ install_req_from_editable, ++ install_req_from_line, ++ parse_requirements, ++ path_to_url, + pip_version, - stdlib_pkgs, - url_to_path, - user_cache_dir, ++ stdlib_pkgs, ++ url_to_path, ++ user_cache_dir, ++ normalize_path, ++) + + if six.PY2: + from .tempfile import TemporaryDirectory diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py -index 67da307..765bd49 100644 +index 9508b75..103b831 100644 --- a/pipenv/patched/piptools/_compat/pip_compat.py +++ b/pipenv/patched/piptools/_compat/pip_compat.py -@@ -1,26 +1,24 @@ +@@ -1,22 +1,72 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import -- - import importlib --from contextlib import contextmanager -- --import pip --from pip._vendor.packaging.version import parse as parse_version -- --PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split("."))) ++import importlib +import os +from appdirs import user_cache_dir +os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") +import pip_shims.shims +from pip_shims.models import ShimmedPathCollection, ImportTypes --try: -- from pip._internal.req.req_tracker import RequirementTracker --except ImportError: +-import pip +-from pip._internal.req import parse_requirements as _parse_requirements +-from pip._vendor.packaging.version import parse as parse_version +InstallationCandidate = ShimmedPathCollection("InstallationCandidate", ImportTypes.CLASS) +InstallationCandidate.create_path("models.candidate", "18.0", "9999") +InstallationCandidate.create_path("index", "7.0.3", "10.9.9") -- @contextmanager -- def RequirementTracker(): -- yield +-PIP_VERSION = tuple(map(int, parse_version(pip.__version__).base_version.split("."))) +PIP_VERSION = tuple(map(int, pip_shims.shims.parsed_pip_version.parsed_version.base_version.split("."))) +RequirementTracker = pip_shims.shims.RequirementTracker - def do_import(module_path, subimport=None, old_path=None): - old_path = old_path or module_path -- prefixes = ["pip._internal", "pip"] +-if PIP_VERSION[:2] <= (20, 0): ++def do_import(module_path, subimport=None, old_path=None): ++ old_path = old_path or module_path + pip_path = os.environ.get("PIP_SHIMS_BASE_MODULE", "pip") + prefixes = ["{}._internal".format(pip_path), pip_path] - paths = [module_path, old_path] - search_order = [ - "{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None -@@ -37,31 +35,29 @@ def do_import(module_path, subimport=None, old_path=None): - return getattr(imported, package) - - --InstallRequirement = do_import("req.req_install", "InstallRequirement") --InstallationCandidate = do_import( -- "models.candidate", "InstallationCandidate", old_path="index" --) --parse_requirements = do_import("req.req_file", "parse_requirements") --RequirementSet 
= do_import("req.req_set", "RequirementSet") --user_cache_dir = do_import("utils.appdirs", "user_cache_dir") --FAVORITE_HASH = do_import("utils.hashes", "FAVORITE_HASH") --path_to_url = do_import("utils.urls", "path_to_url", old_path="download") --url_to_path = do_import("utils.urls", "url_to_path", old_path="download") --PackageFinder = do_import("index.package_finder", "PackageFinder", old_path="index") --FormatControl = do_import("models.format_control", "FormatControl", old_path="index") --InstallCommand = do_import("commands.install", "InstallCommand") --Wheel = do_import("wheel", "Wheel") --cmdoptions = do_import("cli.cmdoptions", old_path="cmdoptions") --get_installed_distributions = do_import( -- "utils.misc", "get_installed_distributions", old_path="utils" --) --PyPI = do_import("models.index", "PyPI") --stdlib_pkgs = do_import("utils.compat", "stdlib_pkgs", old_path="compat") --DEV_PKGS = do_import("commands.freeze", "DEV_PKGS") --Link = do_import("models.link", "Link", old_path="index") ++ paths = [module_path, old_path] ++ search_order = [ ++ "{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None ++ ] ++ package = subimport if subimport else None ++ for to_import in search_order: ++ if not subimport: ++ to_import, _, package = to_import.rpartition(".") ++ try: ++ imported = importlib.import_module(to_import) ++ except ImportError: ++ continue ++ else: ++ return getattr(imported, package) + ++if PIP_VERSION[:2] <= (20, 0): + def install_req_from_parsed_requirement(req, **kwargs): + return req + +- + else: + from pip._internal.req.constructors import install_req_from_parsed_requirement + +InstallRequirement = pip_shims.shims.InstallRequirement +InstallationError = pip_shims.shims.InstallationError +parse_requirements = pip_shims.shims.parse_requirements @@ -114,21 +113,41 @@ index 67da307..765bd49 100644 +stdlib_pkgs = pip_shims.shims.stdlib_pkgs +DEV_PKGS = pip_shims.shims.DEV_PKGS +Link = pip_shims.shims.Link - Session = do_import("_vendor.requests.sessions", "Session") --Resolver = do_import("legacy_resolve", "Resolver", old_path="resolve") --WheelCache = do_import("cache", "WheelCache", old_path="wheel") ++Session = do_import("_vendor.requests.sessions", "Session") +Resolver = pip_shims.shims.Resolver +VcsSupport = pip_shims.shims.VcsSupport +WheelCache = pip_shims.shims.WheelCache +pip_version = pip_shims.shims.pip_version ++normalize_path = do_import("utils.misc", "normalize_path") ++install_req_from_line = pip_shims.shims.install_req_from_line ++install_req_from_editable = pip_shims.shims.install_req_from_editable + + def parse_requirements( + filename, session, finder=None, options=None, constraint=False, isolated=False +diff --git a/pipenv/patched/piptools/cache.py b/pipenv/patched/piptools/cache.py +index 9b6bf55..983ddb6 100644 +--- a/pipenv/patched/piptools/cache.py ++++ b/pipenv/patched/piptools/cache.py +@@ -6,7 +6,7 @@ import os + import platform + import sys - # pip 18.1 has refactored InstallRequirement constructors use by pip-tools. 
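The compat layer above has one job: resolve pip internals to a single name no matter where the running pip keeps them. pip_shims reads PIP_SHIMS_BASE_MODULE to decide which package tree to search, which is how the same code serves both a stock pip and the vendored pipenv.patched.notpip copy, and do_import() walks the plausible module paths until one imports. A short usage sketch, assuming the do_import helper defined above:

    import os
    os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip")

    # do_import("models.link", "Link", old_path="index") tries, in order:
    #   pipenv.patched.notpip._internal.models.link   (new layout)
    #   pipenv.patched.notpip._internal.index         (pre-19.3 layout)
    #   pipenv.patched.notpip.models.link
    #   pipenv.patched.notpip.index
    # and returns the first module that imports cleanly and has a
    # `Link` attribute.
    Link = do_import("models.link", "Link", old_path="index")
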
- if PIP_VERSION < (18, 1): +-from pip._vendor.packaging.requirements import Requirement ++from pipenv.vendor.packaging.requirements import Requirement + + from .exceptions import PipToolsError + from .utils import as_tuple, key_from_req, lookup_table diff --git a/pipenv/patched/piptools/locations.py b/pipenv/patched/piptools/locations.py -index fb66cf3..bb199f6 100644 +index 9ca0ffe..37125c9 100644 --- a/pipenv/patched/piptools/locations.py +++ b/pipenv/patched/piptools/locations.py -@@ -5,7 +5,10 @@ from ._compat import user_cache_dir +@@ -1,12 +1,15 @@ + import os + from shutil import rmtree + +-from pip._internal.utils.appdirs import user_cache_dir ++from ._compat import user_cache_dir + from .click import secho # The user_cache_dir helper comes straight from pip itself @@ -141,10 +160,21 @@ index fb66cf3..bb199f6 100644 # NOTE # We used to store the cache dir under ~/.pip-tools, which is not the diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py -index f389784..c1bcf9d 100644 +index ec3a796..1aa29f0 100644 --- a/pipenv/patched/piptools/repositories/local.py +++ b/pipenv/patched/piptools/repositories/local.py -@@ -61,7 +61,8 @@ class LocalRequirementsRepository(BaseRepository): +@@ -3,9 +3,7 @@ from __future__ import absolute_import, division, print_function, unicode_litera + + from contextlib import contextmanager + +-from pip._internal.utils.hashes import FAVORITE_HASH +- +-from .._compat import PIP_VERSION ++from .._compat import PIP_VERSION, FAVORITE_HASH + from .base import BaseRepository + + from piptools.utils import as_tuple, key_from_ireq, make_install_requirement +@@ -65,7 +63,8 @@ class LocalRequirementsRepository(BaseRepository): if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): project, version, _ = as_tuple(existing_pin) return make_install_requirement( @@ -155,10 +185,10 @@ index f389784..c1bcf9d 100644 else: return self.repository.find_best_match(ireq, prereleases) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py -index acbd680..13378ae 100644 +index ef5ba4e..fe6e735 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py -@@ -2,21 +2,29 @@ +@@ -2,28 +2,48 @@ from __future__ import absolute_import, division, print_function, unicode_literals import collections @@ -166,46 +196,61 @@ index acbd680..13378ae 100644 import hashlib import os from contextlib import contextmanager - from functools import partial from shutil import rmtree +-from pip._internal.cache import WheelCache +-from pip._internal.commands import create_command +-from pip._internal.models.index import PyPI +-from pip._internal.models.link import Link +-from pip._internal.models.wheel import Wheel +-from pip._internal.req import RequirementSet +-from pip._internal.req.req_tracker import get_requirement_tracker +-from pip._internal.utils.hashes import FAVORITE_HASH +-from pip._internal.utils.misc import normalize_path +-from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager +-from pip._internal.utils.urls import path_to_url, url_to_path +- +-from .._compat import PIP_VERSION, TemporaryDirectory, contextlib ++from pip_shims.shims import ( ++ TempDirectory, ++ global_tempdir_manager, ++ get_requirement_tracker, ++ InstallCommand ++) +from packaging.requirements import Requirement +from packaging.specifiers import Specifier, SpecifierSet + - from .._compat import ( - FAVORITE_HASH, - PIP_VERSION, ++from 
.._compat import ( ++ FAVORITE_HASH, ++ PIP_VERSION, + InstallationError, + InstallRequirement, - Link, - PyPI, - RequirementSet, - RequirementTracker, - Resolver as PipResolver, ++ Link, ++ normalize_path, ++ PyPI, ++ RequirementSet, ++ RequirementTracker, + SafeFileCache, - TemporaryDirectory, ++ TemporaryDirectory, + VcsSupport, - Wheel, - WheelCache, - contextlib, -@@ -24,6 +32,7 @@ from .._compat import ( - is_file_url, - is_vcs_url, - path_to_url, ++ Wheel, ++ WheelCache, ++ contextlib, ++ path_to_url, + pip_version, - url_to_path, - ) - from ..cache import CACHE_DIR -@@ -31,6 +40,8 @@ from ..click import progressbar ++ url_to_path, ++) ++from ..locations import CACHE_DIR + from ..click import progressbar from ..exceptions import NoCandidateFound from ..logging import log from ..utils import ( + dedup, + clean_requires_python, - create_install_command, fs_str, is_pinned_requirement, -@@ -40,10 +51,50 @@ from ..utils import ( + is_url_requirement, +@@ -32,10 +52,50 @@ from ..utils import ( ) from .base import BaseRepository @@ -256,29 +301,38 @@ index acbd680..13378ae 100644 class PyPIRepository(BaseRepository): DEFAULT_INDEX_URL = PyPI.simple_url -@@ -54,8 +105,9 @@ class PyPIRepository(BaseRepository): +@@ -46,19 +106,27 @@ class PyPIRepository(BaseRepository): changed/configured on the Finder. """ -- def __init__(self, pip_args, build_isolation=False): -+ def __init__(self, pip_args, session=None, build_isolation=False, use_json=False): - self.build_isolation = build_isolation +- def __init__(self, pip_args, cache_dir): ++ def __init__(self, pip_args, cache_dir=CACHE_DIR, session=None, build_isolation=False, use_json=False): ++ self.build_isolation = build_isolation + self.use_json = use_json - ++ self.cache_dir = cache_dir ++ # Use pip's parser for pip.conf management and defaults. # General options (find_links, index_url, extra_index_url, trusted_host, -@@ -63,7 +115,9 @@ class PyPIRepository(BaseRepository): - command = create_install_command() - self.options, _ = command.parse_args(pip_args) - -- self.session = command._build_session(self.options) + # and pre) are deferred to pip. 
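The __init__ changes in the hunk below lean on pip itself for configuration: instantiating InstallCommand and calling parse_args() yields an options object with pip.conf, environment, and command-line defaults already applied, and the command's own helpers then build a matching network session and package finder. Roughly, as a sketch assuming pip_shims exposes InstallCommand the way the _compat module above does:

    from pip_shims.shims import InstallCommand

    command = InstallCommand()
    # Any pip-style args work here; an empty list means pure defaults.
    options, _ = command.parse_args(["--index-url", "https://pypi.org/simple"])
    session = command._build_session(options)   # a configured requests session
    finder = command._build_package_finder(options=options, session=session)
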
+- self.command = create_command("install") ++ self.command = InstallCommand() + self.options, _ = self.command.parse_args(pip_args) ++ if self.build_isolation is not None: ++ self.options.build_isolation = build_isolation + if self.options.cache_dir: + self.options.cache_dir = normalize_path(self.options.cache_dir) + + self.options.require_hashes = False + self.options.ignore_dependencies = False + +- self.session = self.command._build_session(self.options) + if session is None: -+ session = command._build_session(self.options) ++ session = self.command._build_session(self.options) + self.session = session - self.finder = command._build_package_finder( + self.finder = self.command._build_package_finder( options=self.options, session=self.session ) -@@ -78,6 +132,10 @@ class PyPIRepository(BaseRepository): +@@ -73,6 +141,10 @@ class PyPIRepository(BaseRepository): # of all secondary dependencies for the given requirement, so we # only have to go to disk once for each requirement self._dependencies_cache = {} @@ -289,7 +343,7 @@ index acbd680..13378ae 100644 # Setup file paths self.freshen_build_caches() -@@ -118,13 +176,15 @@ class PyPIRepository(BaseRepository): +@@ -114,13 +186,15 @@ class PyPIRepository(BaseRepository): if ireq.editable or is_url_requirement(ireq): return ireq # return itself as the best match @@ -309,8 +363,8 @@ index acbd680..13378ae 100644 # Reuses pip's internal candidate sort key to sort matching_candidates = [candidates_by_version[ver] for ver in matching_versions] -@@ -153,11 +213,74 @@ class PyPIRepository(BaseRepository): - best_candidate.project, +@@ -136,9 +210,66 @@ class PyPIRepository(BaseRepository): + best_candidate.name, best_candidate.version, ireq.extras, + ireq.markers, @@ -374,85 +428,12 @@ index acbd680..13378ae 100644 + return set() + def resolve_reqs(self, download_dir, ireq, wheel_cache): - results = None -+ ireq.isolated = self.build_isolation -+ ireq._wheel_cache = wheel_cache -+ if ireq and not ireq.link: -+ ireq.populate_link(self.finder, False, False) -+ if ireq.link and not ireq.link.is_wheel: -+ ireq.ensure_has_source_dir(self.source_dir) - - if PIP_VERSION < (10,): - reqset = RequirementSet( -@@ -166,11 +289,13 @@ class PyPIRepository(BaseRepository): - download_dir=download_dir, - wheel_download_dir=self._wheel_download_dir, - session=self.session, -+ ignore_installed=True, -+ ignore_compatibility=False, - wheel_cache=wheel_cache, - ) -- results = reqset._prepare_file(self.finder, ireq) -+ results = reqset._prepare_file(self.finder, ireq, ignore_requires_python=True) - else: -- from pip._internal.operations.prepare import RequirementPreparer -+ from pip_shims.shims import RequirementPreparer - - preparer_kwargs = { - "build_dir": self.build_dir, -@@ -186,21 +311,24 @@ class PyPIRepository(BaseRepository): - "upgrade_strategy": "to-satisfy-only", - "force_reinstall": False, - "ignore_dependencies": False, -- "ignore_requires_python": False, -+ "ignore_requires_python": True, - "ignore_installed": True, - "use_user_site": False, -+ "ignore_compatibility": False, -+ "use_pep517": True, - } - make_install_req_kwargs = {"isolated": False, "wheel_cache": wheel_cache} - - if PIP_VERSION < (19, 3): - resolver_kwargs.update(**make_install_req_kwargs) - else: -- from pip._internal.req.constructors import install_req_from_req_string -+ from pipenv.vendor.pip_shims.shims import install_req_from_req_string - - make_install_req = partial( - install_req_from_req_string, **make_install_req_kwargs - ) - resolver_kwargs["make_install_req"] = 
make_install_req -+ del resolver_kwargs["use_pep517"] - - if PIP_VERSION >= (20,): - preparer_kwargs["session"] = self.session -@@ -208,6 +336,7 @@ class PyPIRepository(BaseRepository): - - resolver = None - preparer = None -+ reqset = None - with RequirementTracker() as req_tracker: - # Pip 18 uses a requirement tracker to prevent fork bombs - if req_tracker: -@@ -216,7 +345,6 @@ class PyPIRepository(BaseRepository): - resolver_kwargs["preparer"] = preparer - reqset = RequirementSet() - ireq.is_direct = True -- reqset.add_requirement(ireq) - - resolver = PipResolver(**resolver_kwargs) - require_hashes = False -@@ -225,12 +353,16 @@ class PyPIRepository(BaseRepository): - results = resolver._resolve_one(reqset, ireq) - else: - results = resolver._resolve_one(reqset, ireq, require_hashes) -+ try: -+ reqset.cleanup_files() -+ except (AttributeError, OSError): -+ pass + with get_requirement_tracker() as req_tracker, TempDirectory( + kind="resolver" +@@ -173,10 +304,11 @@ class PyPIRepository(BaseRepository): -- reqset.cleanup_files() + if PIP_VERSION[:2] <= (20, 0): + reqset.cleanup_files() + results = set(results) if results else set() - return set(results) @@ -463,19 +444,19 @@ index acbd680..13378ae 100644 """ Given a pinned, URL, or editable InstallRequirement, returns a set of dependencies (also InstallRequirements, but not necessarily pinned). -@@ -265,9 +397,8 @@ class PyPIRepository(BaseRepository): - wheel_cache = WheelCache(CACHE_DIR, self.options.format_control) - prev_tracker = os.environ.get("PIP_REQ_TRACKER") - try: -- self._dependencies_cache[ireq] = self.resolve_reqs( -- download_dir, ireq, wheel_cache -- ) -+ results, ireq = self.resolve_reqs(download_dir, ireq, wheel_cache) -+ self._dependencies_cache[ireq] = results - finally: - if "PIP_REQ_TRACKER" in os.environ: - if prev_tracker: -@@ -305,7 +436,7 @@ class PyPIRepository(BaseRepository): +@@ -212,9 +344,10 @@ class PyPIRepository(BaseRepository): + wheel_cache = WheelCache(self._cache_dir, self.options.format_control) + prev_tracker = os.environ.get("PIP_REQ_TRACKER") + try: +- self._dependencies_cache[ireq] = self.resolve_reqs( ++ results, ireq = self.resolve_reqs( + download_dir, ireq, wheel_cache + ) ++ self._dependencies_cache[ireq] = results + finally: + if "PIP_REQ_TRACKER" in os.environ: + if prev_tracker: +@@ -252,7 +385,7 @@ class PyPIRepository(BaseRepository): cached_link = Link(path_to_url(cached_path)) else: cached_link = link @@ -484,7 +465,7 @@ index acbd680..13378ae 100644 if not is_pinned_requirement(ireq): raise TypeError("Expected pinned requirement, got {}".format(ireq)) -@@ -313,12 +444,10 @@ class PyPIRepository(BaseRepository): +@@ -260,39 +393,19 @@ class PyPIRepository(BaseRepository): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. 
@@ -500,14 +481,10 @@ index acbd680..13378ae 100644 log.debug(" {}".format(ireq.name)) -@@ -328,30 +457,11 @@ class PyPIRepository(BaseRepository): - return candidate.link - return { -- self._get_file_hash(get_candidate_link(candidate)) -- for candidate in matching_candidates +- self._get_file_hash(candidate.link) for candidate in matching_candidates + h for h in -+ map(lambda c: self._hash_cache.get_hash(get_candidate_link(c)), matching_candidates) ++ map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) + if h is not None } @@ -535,30 +512,31 @@ index acbd680..13378ae 100644 def allow_all_wheels(self): """ diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py -index fc53f18..7e856fe 100644 +index 0116992..550069d 100644 --- a/pipenv/patched/piptools/resolver.py +++ b/pipenv/patched/piptools/resolver.py -@@ -6,6 +6,9 @@ import os +@@ -6,7 +6,9 @@ import os from functools import partial from itertools import chain, count +-from pip._internal.req.constructors import install_req_from_line ++from pip_shims.shims import install_req_from_line +from pipenv.vendor.requirementslib.models.markers import normalize_marker_str +from packaging.markers import Marker -+ + from . import click - from ._compat import install_req_from_line - from .cache import DependencyCache -@@ -34,6 +37,7 @@ class RequirementSummary(object): + from .logging import log +@@ -33,6 +35,7 @@ class RequirementSummary(object): self.req = ireq.req - self.key = key_from_req(ireq.req) + self.key = key_from_ireq(ireq) self.extras = str(sorted(ireq.extras)) + self.markers = ireq.markers self.specifier = str(ireq.specifier) def __eq__(self, other): -@@ -63,6 +67,17 @@ def combine_install_requirements(ireqs): - # NOTE we may be losing some info on dropped reqs here - combined_ireq.req.specifier &= ireq.req.specifier +@@ -63,6 +66,17 @@ def combine_install_requirements(ireqs): + if combined_ireq.req is not None and ireq.req is not None: + combined_ireq.req.specifier &= ireq.req.specifier combined_ireq.constraint &= ireq.constraint + if ireq.markers and not combined_ireq.markers: + combined_ireq.markers = copy.deepcopy(ireq.markers) @@ -574,10 +552,10 @@ index fc53f18..7e856fe 100644 # Return a sorted, de-duped tuple of extras combined_ireq.extras = tuple( sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))) -@@ -335,10 +350,19 @@ class Resolver(object): - Editable requirements will never be looked up, as they may have - changed at any time. 
- """ +@@ -337,10 +351,19 @@ class Resolver(object): + if ireq.constraint: + return + - if ireq.editable or is_url_requirement(ireq): + if ireq.editable or (is_url_requirement(ireq) and not ireq.link.is_wheel): for dependency in self.repository.get_dependencies(ireq): @@ -595,7 +573,7 @@ index fc53f18..7e856fe 100644 elif not is_pinned_requirement(ireq): raise TypeError( "Expected pinned or editable requirement, got {}".format(ireq) -@@ -356,7 +380,7 @@ class Resolver(object): +@@ -358,7 +381,7 @@ class Resolver(object): fg="yellow", ) dependencies = self.repository.get_dependencies(ireq) @@ -604,7 +582,7 @@ index fc53f18..7e856fe 100644 # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4'] dependency_strings = self.dependency_cache[ireq] -@@ -372,7 +396,8 @@ class Resolver(object): +@@ -374,7 +397,8 @@ class Resolver(object): ) def reverse_dependencies(self, ireqs): @@ -614,11 +592,73 @@ index fc53f18..7e856fe 100644 + ireq for ireq in ireqs if not (ireq.editable or is_non_wheel_url(ireq)) ] return self.dependency_cache.reverse_dependencies(non_editable) +diff --git a/pipenv/patched/piptools/scripts/compile.py b/pipenv/patched/piptools/scripts/compile.py +index 03232a8..a7bfb4c 100755 +--- a/pipenv/patched/piptools/scripts/compile.py ++++ b/pipenv/patched/piptools/scripts/compile.py +@@ -7,8 +7,8 @@ import sys + import tempfile + + from click.utils import safecall +-from pip._internal.commands import create_command +-from pip._internal.req.constructors import install_req_from_line ++from ._compat import InstallCommand ++from ._compat import install_req_from_line + + from .. import click + from .._compat import parse_requirements +@@ -25,7 +25,7 @@ DEFAULT_REQUIREMENTS_FILE = "requirements.in" + DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt" + + # Get default values of the pip's options (including options from pip.conf). +-install_command = create_command("install") ++install_command = InstallComand() + pip_defaults = install_command.parser.get_default_values() + + +diff --git a/pipenv/patched/piptools/scripts/sync.py b/pipenv/patched/piptools/scripts/sync.py +index 137e813..4a7b3d5 100755 +--- a/pipenv/patched/piptools/scripts/sync.py ++++ b/pipenv/patched/piptools/scripts/sync.py +@@ -6,8 +6,7 @@ import os + import shlex + import sys + +-from pip._internal.commands import create_command +-from pip._internal.utils.misc import get_installed_distributions ++from ._compat import get_installed_distributions, InstallCommand + + from .. import click, sync + from .._compat import parse_requirements +@@ -112,7 +111,7 @@ def cli( + log.error("ERROR: " + msg) + sys.exit(2) + +- install_command = create_command("install") ++ install_command = InstallCommand() + options, _ = install_command.parse_args([]) + session = install_command._build_session(options) + finder = install_command._build_package_finder(options=options, session=session) +diff --git a/pipenv/patched/piptools/sync.py b/pipenv/patched/piptools/sync.py +index 430b4bb..015ff7a 100644 +--- a/pipenv/patched/piptools/sync.py ++++ b/pipenv/patched/piptools/sync.py +@@ -4,8 +4,8 @@ import sys + import tempfile + from subprocess import check_call # nosec + +-from pip._internal.commands.freeze import DEV_PKGS +-from pip._internal.utils.compat import stdlib_pkgs ++from ._compat import DEV_PKGS ++from ._compat import stdlib_pkgs + + from . 
import click + from .exceptions import IncompatibleRequirements diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py -index 8727f1e..c9f53f7 100644 +index 7733447..aa93ec8 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py -@@ -1,6 +1,7 @@ +@@ -1,14 +1,19 @@ # coding: utf-8 from __future__ import absolute_import, division, print_function, unicode_literals @@ -626,18 +666,20 @@ index 8727f1e..c9f53f7 100644 import sys from collections import OrderedDict from itertools import chain, groupby -@@ -8,6 +9,10 @@ from itertools import chain, groupby + import six from click.utils import LazyFile +-from pip._internal.req.constructors import install_req_from_line ++from ._compat import install_req_from_line from six.moves import shlex_quote +from pipenv.vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier +from pipenv.vendor.packaging.version import Version, InvalidVersion, parse as parse_version +from pipenv.vendor.packaging.markers import Marker, Op, Value, Variable + - from ._compat import PIP_VERSION, InstallCommand, install_req_from_line + from ._compat import PIP_VERSION from .click import style -@@ -23,6 +28,70 @@ COMPILE_EXCLUDE_OPTIONS = { +@@ -25,6 +30,70 @@ COMPILE_EXCLUDE_OPTIONS = { } @@ -708,7 +750,7 @@ index 8727f1e..c9f53f7 100644 def key_from_ireq(ireq): """Get a standardized key for an InstallRequirement.""" if ireq.req is None and ireq.link is not None: -@@ -48,16 +117,51 @@ def comment(text): +@@ -50,16 +119,51 @@ def comment(text): return style(text, fg="green") @@ -738,13 +780,13 @@ index 8727f1e..c9f53f7 100644 + Formats a packaging.requirements.Requirement with a lowercase name. + + This is simply a copy of -+ https://github.com/pypa/packaging/blob/16.8/packaging/requirements.py#L109-L124 ++ https://github.com/pypa/pipenv/patched/packaging/blob/pipenv/patched/16.8/packaging/requirements.py#L109-L124 + modified to lowercase the dependency name. + + Previously, we were invoking the original Requirement.__str__ method and + lowercasing the entire result, which would lowercase the name, *and* other, + important stuff that should not be lowercased (such as the marker). See -+ this issue for more information: https://github.com/pypa/pipenv/issues/2113. ++ this issue for more information: https://github.com/pypa/pipenv/patched/pipenv/issues/2113. 
+ """ + parts = [requirement.name.lower()] + @@ -764,7 +806,7 @@ index 8727f1e..c9f53f7 100644 def is_url_requirement(ireq): -@@ -78,10 +182,10 @@ def format_requirement(ireq, marker=None, hashes=None): +@@ -80,10 +184,10 @@ def format_requirement(ireq, marker=None, hashes=None): elif is_url_requirement(ireq): line = ireq.link.url else: From 7744b6c43a92206f89fb421c02289976b2782fa3 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 15:28:55 -0400 Subject: [PATCH 03/12] Update piptools resolver creation Signed-off-by: Dan Ryan --- pipenv/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pipenv/utils.py b/pipenv/utils.py index 13fba13331..eb1f6519e8 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -781,10 +781,12 @@ def parsed_constraints(self): return self._parsed_constraints def get_resolver(self, clear=False, pre=False): - from pipenv.patched.piptools.resolver import Resolver - self._resolver = Resolver( + from pipenv.patched.piptools.resolver import Resolver as PiptoolsResolver + from pipenv.patched.piptools.cache import DependencyCache + self._resolver = PiptoolsResolver( constraints=self.parsed_constraints, repository=self.repository, - clear_caches=clear, prereleases=pre, + cache=DependencyCache(environments.PIPENV_CACHE_DIR), clear_caches=clear, + prereleases=pre, allow_unsafe=False ) @property From ef54bfd725010b989dc62d4771b73b058ed6b569 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 15:30:50 -0400 Subject: [PATCH 04/12] Update versions of pip and piptools in patched.txt Signed-off-by: Dan Ryan --- pipenv/patched/patched.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt index b8a2816a25..e5efa4388e 100644 --- a/pipenv/patched/patched.txt +++ b/pipenv/patched/patched.txt @@ -1,5 +1,5 @@ safety==1.8.7 crayons==0.1.2 pipfile==0.0.2 -pip-tools==4.3.0 -pip==19.3.1 +pip-tools==5.0.0 +pip==20.0.2 From 89eb67ef92efa7a68f0dcd1261af12e666248029 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 16:58:11 -0400 Subject: [PATCH 05/12] Rerun vendoring and remove extra files Signed-off-by: Dan Ryan --- pipenv/patched/notpip/_internal/collector.py | 548 -------- .../distributions/source/__init__.py | 0 .../_internal/distributions/source/legacy.py | 98 -- pipenv/patched/notpip/_internal/download.py | 578 -------- pipenv/patched/notpip/_internal/index.py | 1028 -------------- .../_internal/operations/generate_metadata.py | 136 -- pipenv/patched/notpip/_internal/wheel.py | 1181 ----------------- pipenv/patched/notpip/_vendor/README.rst | 151 --- .../patched/notpip/_vendor/cachecontrol.pyi | 1 - pipenv/patched/notpip/_vendor/certifi.pyi | 1 - pipenv/patched/notpip/_vendor/chardet.pyi | 1 - pipenv/patched/notpip/_vendor/colorama.pyi | 1 - pipenv/patched/notpip/_vendor/distlib.pyi | 1 - pipenv/patched/notpip/_vendor/distro.pyi | 1 - pipenv/patched/notpip/_vendor/html5lib.pyi | 1 - pipenv/patched/notpip/_vendor/idna.pyi | 1 - pipenv/patched/notpip/_vendor/ipaddress.pyi | 1 - pipenv/patched/notpip/_vendor/msgpack.pyi | 1 - pipenv/patched/notpip/_vendor/packaging.pyi | 1 - .../notpip/_vendor/packaging/LICENSE.APACHE | 2 +- .../patched/notpip/_vendor/packaging/py.typed | 0 pipenv/patched/notpip/_vendor/pep517.pyi | 1 - .../patched/notpip/_vendor/pkg_resources.pyi | 1 - pipenv/patched/notpip/_vendor/progress.pyi | 1 - pipenv/patched/notpip/_vendor/pyparsing.pyi | 1 - pipenv/patched/notpip/_vendor/pytoml.pyi | 1 - 
pipenv/patched/notpip/_vendor/requests.pyi | 1 - pipenv/patched/notpip/_vendor/retrying.pyi | 1 - pipenv/patched/notpip/_vendor/six.LICENSE | 2 +- .../patched/notpip/_vendor/six/__init__.pyi | 1 - .../notpip/_vendor/six/moves/__init__.pyi | 1 - .../notpip/_vendor/six/moves/configparser.pyi | 1 - pipenv/patched/notpip/_vendor/urllib3.pyi | 1 - pipenv/patched/notpip/_vendor/vendor.txt | 20 +- .../patched/notpip/_vendor/webencodings.pyi | 1 - pipenv/patched/notpip/appdirs.LICENSE.txt | 23 - pipenv/patched/notpip/contextlib2.LICENSE.txt | 122 -- pipenv/patched/notpip/distro.LICENSE | 202 --- pipenv/patched/notpip/idna.LICENSE.rst | 80 -- pipenv/patched/notpip/ipaddress.LICENSE | 50 - pipenv/patched/notpip/msgpack.COPYING | 14 - .../patched/notpip/packaging.LICENSE.APACHE | 177 --- pipenv/patched/notpip/packaging.LICENSE.BSD | 23 - pipenv/patched/notpip/pyparsing.LICENSE | 18 - pipenv/patched/notpip/retrying.LICENSE | 202 --- pipenv/patched/notpip/six.LICENSE | 18 - pipenv/patched/notpip/webencodings.LICENSE | 31 - pipenv/patched/piptools/scripts/compile.py | 0 pipenv/patched/piptools/scripts/sync.py | 0 pipenv/patched/piptools/utils.py | 4 +- .../{safety.LICENSE => safety/LICENSE} | 1 + pipenv/vendor/pip_shims/__init__.py | 20 + pipenv/vendor/pip_shims/compat.py | 2 +- 53 files changed, 36 insertions(+), 4718 deletions(-) delete mode 100644 pipenv/patched/notpip/_internal/collector.py delete mode 100644 pipenv/patched/notpip/_internal/distributions/source/__init__.py delete mode 100644 pipenv/patched/notpip/_internal/distributions/source/legacy.py delete mode 100644 pipenv/patched/notpip/_internal/download.py delete mode 100644 pipenv/patched/notpip/_internal/index.py delete mode 100644 pipenv/patched/notpip/_internal/operations/generate_metadata.py delete mode 100644 pipenv/patched/notpip/_internal/wheel.py delete mode 100644 pipenv/patched/notpip/_vendor/README.rst delete mode 100644 pipenv/patched/notpip/_vendor/cachecontrol.pyi delete mode 100644 pipenv/patched/notpip/_vendor/certifi.pyi delete mode 100644 pipenv/patched/notpip/_vendor/chardet.pyi delete mode 100644 pipenv/patched/notpip/_vendor/colorama.pyi delete mode 100644 pipenv/patched/notpip/_vendor/distlib.pyi delete mode 100644 pipenv/patched/notpip/_vendor/distro.pyi delete mode 100644 pipenv/patched/notpip/_vendor/html5lib.pyi delete mode 100644 pipenv/patched/notpip/_vendor/idna.pyi delete mode 100644 pipenv/patched/notpip/_vendor/ipaddress.pyi delete mode 100644 pipenv/patched/notpip/_vendor/msgpack.pyi delete mode 100644 pipenv/patched/notpip/_vendor/packaging.pyi delete mode 100644 pipenv/patched/notpip/_vendor/packaging/py.typed delete mode 100644 pipenv/patched/notpip/_vendor/pep517.pyi delete mode 100644 pipenv/patched/notpip/_vendor/pkg_resources.pyi delete mode 100644 pipenv/patched/notpip/_vendor/progress.pyi delete mode 100644 pipenv/patched/notpip/_vendor/pyparsing.pyi delete mode 100644 pipenv/patched/notpip/_vendor/pytoml.pyi delete mode 100644 pipenv/patched/notpip/_vendor/requests.pyi delete mode 100644 pipenv/patched/notpip/_vendor/retrying.pyi delete mode 100644 pipenv/patched/notpip/_vendor/six/__init__.pyi delete mode 100644 pipenv/patched/notpip/_vendor/six/moves/__init__.pyi delete mode 100644 pipenv/patched/notpip/_vendor/six/moves/configparser.pyi delete mode 100644 pipenv/patched/notpip/_vendor/urllib3.pyi delete mode 100644 pipenv/patched/notpip/_vendor/webencodings.pyi delete mode 100644 pipenv/patched/notpip/appdirs.LICENSE.txt delete mode 100644 pipenv/patched/notpip/contextlib2.LICENSE.txt delete 
mode 100644 pipenv/patched/notpip/distro.LICENSE delete mode 100644 pipenv/patched/notpip/idna.LICENSE.rst delete mode 100644 pipenv/patched/notpip/ipaddress.LICENSE delete mode 100644 pipenv/patched/notpip/msgpack.COPYING delete mode 100644 pipenv/patched/notpip/packaging.LICENSE.APACHE delete mode 100644 pipenv/patched/notpip/packaging.LICENSE.BSD delete mode 100644 pipenv/patched/notpip/pyparsing.LICENSE delete mode 100644 pipenv/patched/notpip/retrying.LICENSE delete mode 100644 pipenv/patched/notpip/six.LICENSE delete mode 100644 pipenv/patched/notpip/webencodings.LICENSE mode change 100755 => 100644 pipenv/patched/piptools/scripts/compile.py mode change 100755 => 100644 pipenv/patched/piptools/scripts/sync.py rename pipenv/patched/{safety.LICENSE => safety/LICENSE} (99%) diff --git a/pipenv/patched/notpip/_internal/collector.py b/pipenv/patched/notpip/_internal/collector.py deleted file mode 100644 index 1469cb7ce8..0000000000 --- a/pipenv/patched/notpip/_internal/collector.py +++ /dev/null @@ -1,548 +0,0 @@ -""" -The main purpose of this module is to expose LinkCollector.collect_links(). -""" - -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - -import cgi -import itertools -import logging -import mimetypes -import os -from collections import OrderedDict - -from pipenv.patched.notpip._vendor import html5lib, requests -from pipenv.patched.notpip._vendor.distlib.compat import unescape -from pipenv.patched.notpip._vendor.requests.exceptions import HTTPError, RetryError, SSLError -from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse -from pipenv.patched.notpip._vendor.six.moves.urllib import request as urllib_request - -from pipenv.patched.notpip._internal.models.link import Link -from pipenv.patched.notpip._internal.utils.filetypes import ARCHIVE_EXTENSIONS -from pipenv.patched.notpip._internal.utils.misc import redact_auth_from_url -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.utils.urls import path_to_url, url_to_path -from pipenv.patched.notpip._internal.vcs import is_url, vcs - -if MYPY_CHECK_RUNNING: - from typing import ( - Callable, Dict, Iterable, List, MutableMapping, Optional, Sequence, - Tuple, Union, - ) - import xml.etree.ElementTree - - from pipenv.patched.notpip._vendor.requests import Response - - from pipenv.patched.notpip._internal.models.search_scope import SearchScope - from pipenv.patched.notpip._internal.network.session import PipSession - - HTMLElement = xml.etree.ElementTree.Element - ResponseHeaders = MutableMapping[str, str] - - -logger = logging.getLogger(__name__) - - -def _match_vcs_scheme(url): - # type: (str) -> Optional[str] - """Look for VCS schemes in the URL. - - Returns the matched VCS scheme, or None if there's no match. - """ - for scheme in vcs.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - return scheme - return None - - -def _is_url_like_archive(url): - # type: (str) -> bool - """Return whether the URL looks like an archive. 
- """ - filename = Link(url).filename - for bad_ext in ARCHIVE_EXTENSIONS: - if filename.endswith(bad_ext): - return True - return False - - -class _NotHTML(Exception): - def __init__(self, content_type, request_desc): - # type: (str, str) -> None - super(_NotHTML, self).__init__(content_type, request_desc) - self.content_type = content_type - self.request_desc = request_desc - - -def _ensure_html_header(response): - # type: (Response) -> None - """Check the Content-Type header to ensure the response contains HTML. - - Raises `_NotHTML` if the content type is not text/html. - """ - content_type = response.headers.get("Content-Type", "") - if not content_type.lower().startswith("text/html"): - raise _NotHTML(content_type, response.request.method) - - -class _NotHTTP(Exception): - pass - - -def _ensure_html_response(url, session): - # type: (str, PipSession) -> None - """Send a HEAD request to the URL, and ensure the response contains HTML. - - Raises `_NotHTTP` if the URL is not available for a HEAD request, or - `_NotHTML` if the content type is not text/html. - """ - scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) - if scheme not in {'http', 'https'}: - raise _NotHTTP() - - resp = session.head(url, allow_redirects=True) - resp.raise_for_status() - - _ensure_html_header(resp) - - -def _get_html_response(url, session): - # type: (str, PipSession) -> Response - """Access an HTML page with GET, and return the response. - - This consists of three parts: - - 1. If the URL looks suspiciously like an archive, send a HEAD first to - check the Content-Type is HTML, to avoid downloading a large file. - Raise `_NotHTTP` if the content type cannot be determined, or - `_NotHTML` if it is not HTML. - 2. Actually perform the request. Raise HTTP exceptions on network failures. - 3. Check the Content-Type header to make sure we got HTML, and raise - `_NotHTML` otherwise. - """ - if _is_url_like_archive(url): - _ensure_html_response(url, session=session) - - logger.debug('Getting page %s', redact_auth_from_url(url)) - - resp = session.get( - url, - headers={ - "Accept": "text/html", - # We don't want to blindly returned cached data for - # /simple/, because authors generally expecting that - # twine upload && pip install will function, but if - # they've done a pip install in the last ~10 minutes - # it won't. Thus by setting this to zero we will not - # blindly use any cached data, however the benefit of - # using max-age=0 instead of no-cache, is that we will - # still support conditional requests, so we will still - # minimize traffic sent in cases where the page hasn't - # changed at all, we will just always incur the round - # trip for the conditional GET now instead of only - # once per 10 minutes. - # For more information, please see pypa/pip#5670. - "Cache-Control": "max-age=0", - }, - ) - resp.raise_for_status() - - # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement of an url. Unless we issue a HEAD request on every - # url we cannot know ahead of time for sure if something is HTML - # or not. However we can check after we've downloaded it. - _ensure_html_header(resp) - - return resp - - -def _get_encoding_from_headers(headers): - # type: (ResponseHeaders) -> Optional[str] - """Determine if we have any encoding information in our headers. 
- """ - if headers and "Content-Type" in headers: - content_type, params = cgi.parse_header(headers["Content-Type"]) - if "charset" in params: - return params['charset'] - return None - - -def _determine_base_url(document, page_url): - # type: (HTMLElement, str) -> str - """Determine the HTML document's base URL. - - This looks for a ```` tag in the HTML document. If present, its href - attribute denotes the base URL of anchor tags in the document. If there is - no such tag (or if it does not have a valid href attribute), the HTML - file's URL is used as the base URL. - - :param document: An HTML document representation. The current - implementation expects the result of ``html5lib.parse()``. - :param page_url: The URL of the HTML document. - """ - for base in document.findall(".//base"): - href = base.get("href") - if href is not None: - return href - return page_url - - -def _clean_link(url): - # type: (str) -> str - """Makes sure a link is fully encoded. That is, if a ' ' shows up in - the link, it will be rewritten to %20 (while not over-quoting - % or other characters).""" - # Split the URL into parts according to the general structure - # `scheme://netloc/path;parameters?query#fragment`. Note that the - # `netloc` can be empty and the URI will then refer to a local - # filesystem path. - result = urllib_parse.urlparse(url) - # In both cases below we unquote prior to quoting to make sure - # nothing is double quoted. - if result.netloc == "": - # On Windows the path part might contain a drive letter which - # should not be quoted. On Linux where drive letters do not - # exist, the colon should be quoted. We rely on urllib.request - # to do the right thing here. - path = urllib_request.pathname2url( - urllib_request.url2pathname(result.path)) - else: - # In addition to the `/` character we protect `@` so that - # revision strings in VCS URLs are properly parsed. - path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@") - return urllib_parse.urlunparse(result._replace(path=path)) - - -def _create_link_from_element( - anchor, # type: HTMLElement - page_url, # type: str - base_url, # type: str -): - # type: (...) -> Optional[Link] - """ - Convert an anchor element in a simple repository page to a Link. - """ - href = anchor.get("href") - if not href: - return None - - url = _clean_link(urllib_parse.urljoin(base_url, href)) - pyrequire = anchor.get('data-requires-python') - pyrequire = unescape(pyrequire) if pyrequire else None - - yanked_reason = anchor.get('data-yanked') - if yanked_reason: - # This is a unicode string in Python 2 (and 3). - yanked_reason = unescape(yanked_reason) - - link = Link( - url, - comes_from=page_url, - requires_python=pyrequire, - yanked_reason=yanked_reason, - ) - - return link - - -def parse_links(page): - # type: (HTMLPage) -> Iterable[Link] - """ - Parse an HTML document, and yield its anchor elements as Link objects. - """ - document = html5lib.parse( - page.content, - transport_encoding=page.encoding, - namespaceHTMLElements=False, - ) - - url = page.url - base_url = _determine_base_url(document, url) - for anchor in document.findall(".//a"): - link = _create_link_from_element( - anchor, - page_url=url, - base_url=base_url, - ) - if link is None: - continue - yield link - - -class HTMLPage(object): - """Represents one page, along with its URL""" - - def __init__( - self, - content, # type: bytes - encoding, # type: Optional[str] - url, # type: str - ): - # type: (...) 
-> None - """ - :param encoding: the encoding to decode the given content. - :param url: the URL from which the HTML was downloaded. - """ - self.content = content - self.encoding = encoding - self.url = url - - def __str__(self): - return redact_auth_from_url(self.url) - - -def _handle_get_page_fail( - link, # type: Link - reason, # type: Union[str, Exception] - meth=None # type: Optional[Callable[..., None]] -): - # type: (...) -> None - if meth is None: - meth = logger.debug - meth("Could not fetch URL %s: %s - skipping", link, reason) - - -def _make_html_page(response): - # type: (Response) -> HTMLPage - encoding = _get_encoding_from_headers(response.headers) - return HTMLPage(response.content, encoding=encoding, url=response.url) - - -def _get_html_page(link, session=None): - # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] - if session is None: - raise TypeError( - "_get_html_page() missing 1 required keyword argument: 'session'" - ) - - url = link.url.split('#', 1)[0] - - # Check for VCS schemes that do not support lookup as web pages. - vcs_scheme = _match_vcs_scheme(url) - if vcs_scheme: - logger.debug('Cannot look at %s URL %s', vcs_scheme, link) - return None - - # Tack index.html onto file:// URLs that point to directories - scheme, _, path, _, _, _ = urllib_parse.urlparse(url) - if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): - # add trailing slash if not present so urljoin doesn't trim - # final segment - if not url.endswith('/'): - url += '/' - url = urllib_parse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s', url) - - try: - resp = _get_html_response(url, session=session) - except _NotHTTP: - logger.debug( - 'Skipping page %s because it looks like an archive, and cannot ' - 'be checked by HEAD.', link, - ) - except _NotHTML as exc: - logger.debug( - 'Skipping page %s because the %s request got Content-Type: %s', - link, exc.request_desc, exc.content_type, - ) - except HTTPError as exc: - _handle_get_page_fail(link, exc) - except RetryError as exc: - _handle_get_page_fail(link, exc) - except SSLError as exc: - reason = "There was a problem confirming the ssl certificate: " - reason += str(exc) - _handle_get_page_fail(link, reason, meth=logger.info) - except requests.ConnectionError as exc: - _handle_get_page_fail(link, "connection error: %s" % exc) - except requests.Timeout: - _handle_get_page_fail(link, "timed out") - else: - return _make_html_page(resp) - return None - - -def _remove_duplicate_links(links): - # type: (Iterable[Link]) -> List[Link] - """ - Return a list of links, with duplicates removed and ordering preserved. - """ - # We preserve the ordering when removing duplicates because we can. - return list(OrderedDict.fromkeys(links)) - - -def group_locations(locations, expand_dir=False): - # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] - """ - Divide a list of locations into two groups: "files" (archives) and "urls." - - :return: A pair of lists (files, urls). 
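# How the mimetype test inside group_locations (below) splits locations;
# guess_type is stdlib and the URLs are made-up examples:
import mimetypes

mimetypes.guess_type("file:///srv/index.html", strict=False)[0]      # 'text/html' -> urls
mimetypes.guess_type("file:///srv/pkg-1.0.tar.gz", strict=False)[0]  # archive -> files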
- """ - files = [] - urls = [] - - # puts the url for the given file path into the appropriate list - def sort_path(path): - url = path_to_url(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': - urls.append(url) - else: - files.append(url) - - for url in locations: - - is_local_path = os.path.exists(url) - is_file_url = url.startswith('file:') - - if is_local_path or is_file_url: - if is_local_path: - path = url - else: - path = url_to_path(url) - if os.path.isdir(path): - if expand_dir: - path = os.path.realpath(path) - for item in os.listdir(path): - sort_path(os.path.join(path, item)) - elif is_file_url: - urls.append(url) - else: - logger.warning( - "Path '{0}' is ignored: " - "it is a directory.".format(path), - ) - elif os.path.isfile(path): - sort_path(path) - else: - logger.warning( - "Url '%s' is ignored: it is neither a file " - "nor a directory.", url, - ) - elif is_url(url): - # Only add url with clear scheme - urls.append(url) - else: - logger.warning( - "Url '%s' is ignored. It is either a non-existing " - "path or lacks a specific scheme.", url, - ) - - return files, urls - - -class CollectedLinks(object): - - """ - Encapsulates all the Link objects collected by a call to - LinkCollector.collect_links(), stored separately as-- - - (1) links from the configured file locations, - (2) links from the configured find_links, and - (3) a dict mapping HTML page url to links from that page. - """ - - def __init__( - self, - files, # type: List[Link] - find_links, # type: List[Link] - pages, # type: Dict[str, List[Link]] - ): - # type: (...) -> None - """ - :param files: Links from file locations. - :param find_links: Links from find_links. - :param pages: A dict mapping HTML page url to links from that page. - """ - self.files = files - self.find_links = find_links - self.pages = pages - - -class LinkCollector(object): - - """ - Responsible for collecting Link objects from all configured locations, - making network requests as needed. - - The class's main method is its collect_links() method. - """ - - def __init__( - self, - session, # type: PipSession - search_scope, # type: SearchScope - ): - # type: (...) -> None - self.search_scope = search_scope - self.session = session - - @property - def find_links(self): - # type: () -> List[str] - return self.search_scope.find_links - - def _get_pages(self, locations): - # type: (Iterable[Link]) -> Iterable[HTMLPage] - """ - Yields (page, page_url) from the given locations, skipping - locations that have errors. - """ - for location in locations: - page = _get_html_page(location, session=self.session) - if page is None: - continue - - yield page - - def collect_links(self, project_name): - # type: (str) -> CollectedLinks - """Find all available links for the given project name. - - :return: All the Link objects (unfiltered), as a CollectedLinks object. - """ - search_scope = self.search_scope - index_locations = search_scope.get_index_urls_locations(project_name) - index_file_loc, index_url_loc = group_locations(index_locations) - fl_file_loc, fl_url_loc = group_locations( - self.find_links, expand_dir=True, - ) - - file_links = [ - Link(url) for url in itertools.chain(index_file_loc, fl_file_loc) - ] - - # We trust every directly linked archive in find_links - find_link_links = [Link(url, '-f') for url in self.find_links] - - # We trust every url that the user has given us whether it was given - # via --index-url or --find-links. - # We want to filter out anything that does not have a secure origin. 
- url_locations = [ - link for link in itertools.chain( - (Link(url) for url in index_url_loc), - (Link(url) for url in fl_url_loc), - ) - if self.session.is_secure_origin(link) - ] - - url_locations = _remove_duplicate_links(url_locations) - lines = [ - '{} location(s) to search for versions of {}:'.format( - len(url_locations), project_name, - ), - ] - for link in url_locations: - lines.append('* {}'.format(link)) - logger.debug('\n'.join(lines)) - - pages_links = {} - for page in self._get_pages(url_locations): - pages_links[page.url] = list(parse_links(page)) - - return CollectedLinks( - files=file_links, - find_links=find_link_links, - pages=pages_links, - ) diff --git a/pipenv/patched/notpip/_internal/distributions/source/__init__.py b/pipenv/patched/notpip/_internal/distributions/source/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pipenv/patched/notpip/_internal/distributions/source/legacy.py b/pipenv/patched/notpip/_internal/distributions/source/legacy.py deleted file mode 100644 index 0e700d2a78..0000000000 --- a/pipenv/patched/notpip/_internal/distributions/source/legacy.py +++ /dev/null @@ -1,98 +0,0 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - -import logging - -from pipenv.patched.notpip._internal.build_env import BuildEnvironment -from pipenv.patched.notpip._internal.distributions.base import AbstractDistribution -from pipenv.patched.notpip._internal.exceptions import InstallationError -from pipenv.patched.notpip._internal.utils.subprocess import runner_with_spinner_message - -logger = logging.getLogger(__name__) - - -class SourceDistribution(AbstractDistribution): - """Represents a source distribution. - - The preparation step for these needs metadata for the packages to be - generated, either using PEP 517 or using the legacy `setup.py egg_info`. - - NOTE from @pradyunsg (14 June 2019) - I expect SourceDistribution class will need to be split into - `legacy_source` (setup.py based) and `source` (PEP 517 based) when we start - bringing logic for preparation out of InstallRequirement into this class. - """ - - def get_pkg_resources_distribution(self): - return self.req.get_dist() - - def prepare_distribution_metadata(self, finder, build_isolation): - # Prepare for building. We need to: - # 1. Load pyproject.toml (if it exists) - # 2. Set up the build environment - - self.req.load_pyproject_toml() - should_isolate = self.req.use_pep517 and build_isolation - if should_isolate: - self._setup_isolation(finder) - - self.req.prepare_metadata() - self.req.assert_source_matches_version() - - def _setup_isolation(self, finder): - def _raise_conflicts(conflicting_with, conflicting_reqs): - format_string = ( - "Some build dependencies for {requirement} " - "conflict with {conflicting_with}: {description}." - ) - error_message = format_string.format( - requirement=self.req, - conflicting_with=conflicting_with, - description=', '.join( - '%s is incompatible with %s' % (installed, wanted) - for installed, wanted in sorted(conflicting) - ) - ) - raise InstallationError(error_message) - - # Isolate in a BuildEnvironment and install the build-time - # requirements. 
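# For orientation only: the "ask the backend for extra build requirements"
# step performed in the second pass below can be reproduced outside pip
# with the pep517 package. The source path and backend name here are
# example values, not pip's own code:
from pep517.wrappers import Pep517HookCaller

hooks = Pep517HookCaller("/path/to/source", "setuptools.build_meta")
extra_reqs = hooks.get_requires_for_build_wheel()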
- self.req.build_env = BuildEnvironment() - self.req.build_env.install_requirements( - finder, self.req.pyproject_requires, 'overlay', - "Installing build dependencies" - ) - conflicting, missing = self.req.build_env.check_requirements( - self.req.requirements_to_check - ) - if conflicting: - _raise_conflicts("PEP 517/518 supported requirements", - conflicting) - if missing: - logger.warning( - "Missing build requirements in pyproject.toml for %s.", - self.req, - ) - logger.warning( - "The project does not specify a build backend, and " - "pip cannot fall back to setuptools without %s.", - " and ".join(map(repr, sorted(missing))) - ) - # Install any extra build dependencies that the backend requests. - # This must be done in a second pass, as the pyproject.toml - # dependencies must be installed before we can call the backend. - with self.req.build_env: - runner = runner_with_spinner_message( - "Getting requirements to build wheel" - ) - backend = self.req.pep517_backend - with backend.subprocess_runner(runner): - reqs = backend.get_requires_for_build_wheel() - - conflicting, missing = self.req.build_env.check_requirements(reqs) - if conflicting: - _raise_conflicts("the backend dependencies", conflicting) - self.req.build_env.install_requirements( - finder, missing, 'normal', - "Installing backend dependencies" - ) diff --git a/pipenv/patched/notpip/_internal/download.py b/pipenv/patched/notpip/_internal/download.py deleted file mode 100644 index b8d12e17fd..0000000000 --- a/pipenv/patched/notpip/_internal/download.py +++ /dev/null @@ -1,578 +0,0 @@ -# The following comment should be removed at some point in the future. -# mypy: disallow-untyped-defs=False - -from __future__ import absolute_import - -import cgi -import logging -import mimetypes -import os -import re -import shutil -import sys - -from pipenv.patched.notpip._vendor import requests -from pipenv.patched.notpip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response -from pipenv.patched.notpip._vendor.six import PY2 -from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse - -from pipenv.patched.notpip._internal.exceptions import HashMismatch, InstallationError -from pipenv.patched.notpip._internal.models.index import PyPI -from pipenv.patched.notpip._internal.network.session import PipSession -from pipenv.patched.notpip._internal.utils.encoding import auto_decode -from pipenv.patched.notpip._internal.utils.filesystem import copy2_fixed -from pipenv.patched.notpip._internal.utils.misc import ( - ask_path_exists, - backup_dir, - consume, - display_path, - format_size, - hide_url, - path_to_display, - rmtree, - splitext, -) -from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.utils.ui import DownloadProgressProvider -from pipenv.patched.notpip._internal.utils.unpacking import unpack_file -from pipenv.patched.notpip._internal.utils.urls import get_url_scheme -from pipenv.patched.notpip._internal.vcs import vcs - -if MYPY_CHECK_RUNNING: - from typing import ( - IO, Callable, List, Optional, Text, Tuple, - ) - - from mypy_extensions import TypedDict - - from pipenv.patched.notpip._internal.models.link import Link - from pipenv.patched.notpip._internal.utils.hashes import Hashes - from pipenv.patched.notpip._internal.vcs.versioncontrol import VersionControl - - if PY2: - CopytreeKwargs = TypedDict( - 'CopytreeKwargs', - { - 'ignore': Callable[[str, List[str]], List[str]], 
- 'symlinks': bool, - }, - total=False, - ) - else: - CopytreeKwargs = TypedDict( - 'CopytreeKwargs', - { - 'copy_function': Callable[[str, str], None], - 'ignore': Callable[[str, List[str]], List[str]], - 'ignore_dangling_symlinks': bool, - 'symlinks': bool, - }, - total=False, - ) - - -__all__ = ['get_file_content', - 'unpack_vcs_link', - 'unpack_file_url', - 'unpack_http_url', 'unpack_url', - 'parse_content_disposition', 'sanitize_content_filename'] - - -logger = logging.getLogger(__name__) - - -def get_file_content(url, comes_from=None, session=None): - # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text] - """Gets the content of a file; it may be a filename, file: URL, or - http: URL. Returns (location, content). Content is unicode. - - :param url: File path or url. - :param comes_from: Origin description of requirements. - :param session: Instance of pip.download.PipSession. - """ - if session is None: - raise TypeError( - "get_file_content() missing 1 required keyword argument: 'session'" - ) - - scheme = get_url_scheme(url) - - if scheme in ['http', 'https']: - # FIXME: catch some errors - resp = session.get(url) - resp.raise_for_status() - return resp.url, resp.text - - elif scheme == 'file': - if comes_from and comes_from.startswith('http'): - raise InstallationError( - 'Requirements file %s references URL %s, which is local' - % (comes_from, url)) - - path = url.split(':', 1)[1] - path = path.replace('\\', '/') - match = _url_slash_drive_re.match(path) - if match: - path = match.group(1) + ':' + path.split('|', 1)[1] - path = urllib_parse.unquote(path) - if path.startswith('/'): - path = '/' + path.lstrip('/') - url = path - - try: - with open(url, 'rb') as f: - content = auto_decode(f.read()) - except IOError as exc: - raise InstallationError( - 'Could not open requirements file: %s' % str(exc) - ) - return url, content - - -_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) - - -def unpack_vcs_link(link, location): - # type: (Link, str) -> None - vcs_backend = _get_used_vcs_backend(link) - assert vcs_backend is not None - vcs_backend.unpack(location, url=hide_url(link.url)) - - -def _get_used_vcs_backend(link): - # type: (Link) -> Optional[VersionControl] - """ - Return a VersionControl object or None. - """ - for vcs_backend in vcs.backends: - if link.scheme in vcs_backend.schemes: - return vcs_backend - return None - - -def _progress_indicator(iterable, *args, **kwargs): - return iterable - - -def _download_url( - resp, # type: Response - link, # type: Link - content_file, # type: IO - hashes, # type: Optional[Hashes] - progress_bar # type: str -): - # type: (...) -> None - try: - total_length = int(resp.headers['content-length']) - except (ValueError, KeyError, TypeError): - total_length = 0 - - cached_resp = getattr(resp, "from_cache", False) - if logger.getEffectiveLevel() > logging.INFO: - show_progress = False - elif cached_resp: - show_progress = False - elif total_length > (40 * 1000): - show_progress = True - elif not total_length: - show_progress = True - else: - show_progress = False - - show_url = link.show_url - - def resp_read(chunk_size): - try: - # Special case for urllib3. - for chunk in resp.raw.stream( - chunk_size, - # We use decode_content=False here because we don't - # want urllib3 to mess with the raw bytes we get - # from the server. If we decompress inside of - # urllib3 then we cannot verify the checksum - # because the checksum will be of the compressed - # file. 
This breakage will only occur if the - # server adds a Content-Encoding header, which - # depends on how the server was configured: - # - Some servers will notice that the file isn't a - # compressible file and will leave the file alone - # and with an empty Content-Encoding - # - Some servers will notice that the file is - # already compressed and will leave the file - # alone and will add a Content-Encoding: gzip - # header - # - Some servers won't notice anything at all and - # will take a file that's already been compressed - # and compress it again and set the - # Content-Encoding: gzip header - # - # By setting this not to decode automatically we - # hope to eliminate problems with the second case. - decode_content=False): - yield chunk - except AttributeError: - # Standard file-like object. - while True: - chunk = resp.raw.read(chunk_size) - if not chunk: - break - yield chunk - - def written_chunks(chunks): - for chunk in chunks: - content_file.write(chunk) - yield chunk - - progress_indicator = _progress_indicator - - if link.netloc == PyPI.netloc: - url = show_url - else: - url = link.url_without_fragment - - if show_progress: # We don't show progress on cached responses - progress_indicator = DownloadProgressProvider(progress_bar, - max=total_length) - if total_length: - logger.info("Downloading %s (%s)", url, format_size(total_length)) - else: - logger.info("Downloading %s", url) - elif cached_resp: - logger.info("Using cached %s", url) - else: - logger.info("Downloading %s", url) - - downloaded_chunks = written_chunks( - progress_indicator( - resp_read(CONTENT_CHUNK_SIZE), - CONTENT_CHUNK_SIZE - ) - ) - if hashes: - hashes.check_against_chunks(downloaded_chunks) - else: - consume(downloaded_chunks) - - -def _copy_file(filename, location, link): - copy = True - download_location = os.path.join(location, link.filename) - if os.path.exists(download_location): - response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' % - display_path(download_location), ('i', 'w', 'b', 'a')) - if response == 'i': - copy = False - elif response == 'w': - logger.warning('Deleting %s', display_path(download_location)) - os.remove(download_location) - elif response == 'b': - dest_file = backup_dir(download_location) - logger.warning( - 'Backing up %s to %s', - display_path(download_location), - display_path(dest_file), - ) - shutil.move(download_location, dest_file) - elif response == 'a': - sys.exit(-1) - if copy: - shutil.copy(filename, download_location) - logger.info('Saved %s', display_path(download_location)) - - -def unpack_http_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - if session is None: - raise TypeError( - "unpack_http_url() missing 1 required keyword argument: 'session'" - ) - - with TempDirectory(kind="unpack") as temp_dir: - # If a download dir is specified, is the file already downloaded there? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - content_type = mimetypes.guess_type(from_path)[0] - else: - # let's download to a tmp dir - from_path, content_type = _download_http_url(link, - session, - temp_dir.path, - hashes, - progress_bar) - - # unpack the archive to the build dir location. 
even when only - # downloading archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type) - - # a download dir is specified; let's copy the archive there - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - if not already_downloaded_path: - os.unlink(from_path) - - -def _copy2_ignoring_special_files(src, dest): - # type: (str, str) -> None - """Copying special files is not supported, but as a convenience to users - we skip errors copying them. This supports tools that may create e.g. - socket files in the project source directory. - """ - try: - copy2_fixed(src, dest) - except shutil.SpecialFileError as e: - # SpecialFileError may be raised due to either the source or - # destination. If the destination was the cause then we would actually - # care, but since the destination directory is deleted prior to - # copy we ignore all of them assuming it is caused by the source. - logger.warning( - "Ignoring special file error '%s' encountered copying %s to %s.", - str(e), - path_to_display(src), - path_to_display(dest), - ) - - -def _copy_source_tree(source, target): - # type: (str, str) -> None - def ignore(d, names): - # Pulling in those directories can potentially be very slow, - # exclude the following directories if they appear in the top - # level dir (and only it). - # See discussion at https://github.com/pypa/pip/pull/6770 - return ['.tox', '.nox'] if d == source else [] - - kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs - - if not PY2: - # Python 2 does not support copy_function, so we only ignore - # errors on special file copy in Python 3. - kwargs['copy_function'] = _copy2_ignoring_special_files - - shutil.copytree(source, target, **kwargs) - - -def unpack_file_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - hashes=None # type: Optional[Hashes] -): - # type: (...) -> None - """Unpack link into location. - - If download_dir is provided and link points to a file, make a copy - of the link file inside download_dir. - """ - link_path = link.file_path - # If it's a url to a local directory - if link.is_existing_dir(): - if os.path.isdir(location): - rmtree(location) - _copy_source_tree(link_path, location) - if download_dir: - logger.info('Link is a directory, ignoring download_dir') - return - - # If --require-hashes is off, `hashes` is either empty, the - # link's embedded hash, or MissingHashes; it is required to - # match. If --require-hashes is on, we are satisfied by any - # hash in `hashes` matching: a URL-based or an option-based - # one; no internet-sourced hash will be in `hashes`. - if hashes: - hashes.check_against_path(link_path) - - # If a download dir is specified, is the file already there and valid? - already_downloaded_path = None - if download_dir: - already_downloaded_path = _check_download_dir(link, - download_dir, - hashes) - - if already_downloaded_path: - from_path = already_downloaded_path - else: - from_path = link_path - - content_type = mimetypes.guess_type(from_path)[0] - - # unpack the archive to the build dir location. 
even when only downloading - # archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type) - - # a download dir is specified and not already downloaded - if download_dir and not already_downloaded_path: - _copy_file(from_path, download_dir, link) - - -def unpack_url( - link, # type: Link - location, # type: str - download_dir=None, # type: Optional[str] - session=None, # type: Optional[PipSession] - hashes=None, # type: Optional[Hashes] - progress_bar="on" # type: str -): - # type: (...) -> None - """Unpack link. - If link is a VCS link: - if only_download, export into download_dir and ignore location - else unpack into location - for other types of link: - - unpack into location - - if download_dir, copy the file into download_dir - - if only_download, mark location for deletion - - :param hashes: A Hashes object, one of whose embedded hashes must match, - or HashMismatch will be raised. If the Hashes is empty, no matches are - required, and unhashable types of requirements (like VCS ones, which - would ordinarily raise HashUnsupported) are allowed. - """ - # non-editable vcs urls - if link.is_vcs: - unpack_vcs_link(link, location) - - # file urls - elif link.is_file: - unpack_file_url(link, location, download_dir, hashes=hashes) - - # http urls - else: - if session is None: - session = PipSession() - - unpack_http_url( - link, - location, - download_dir, - session, - hashes=hashes, - progress_bar=progress_bar - ) - - -def sanitize_content_filename(filename): - # type: (str) -> str - """ - Sanitize the "filename" value from a Content-Disposition header. - """ - return os.path.basename(filename) - - -def parse_content_disposition(content_disposition, default_filename): - # type: (str, str) -> str - """ - Parse the "filename" value from a Content-Disposition header, and - return the default filename if the result is empty. - """ - _type, params = cgi.parse_header(content_disposition) - filename = params.get('filename') - if filename: - # We need to sanitize the filename to prevent directory traversal - # in case the filename contains ".." path parts. - filename = sanitize_content_filename(filename) - return filename or default_filename - - -def _download_http_url( - link, # type: Link - session, # type: PipSession - temp_dir, # type: str - hashes, # type: Optional[Hashes] - progress_bar # type: str -): - # type: (...) -> Tuple[str, str] - """Download link url into temp_dir using provided session""" - target_url = link.url.split('#', 1)[0] - try: - resp = session.get( - target_url, - # We use Accept-Encoding: identity here because requests - # defaults to accepting compressed responses. This breaks in - # a variety of ways depending on how the server is configured. - # - Some servers will notice that the file isn't a compressible - # file and will leave the file alone and with an empty - # Content-Encoding - # - Some servers will notice that the file is already - # compressed and will leave the file alone and will add a - # Content-Encoding: gzip header - # - Some servers won't notice anything at all and will take - # a file that's already been compressed and compress it again - # and set the Content-Encoding: gzip header - # By setting this to request only the identity encoding We're - # hoping to eliminate the third case. 
Hopefully there does not - # exist a server which when given a file will notice it is - # already compressed and that you're not asking for a - # compressed file and will then decompress it before sending - # because if that's the case I don't think it'll ever be - # possible to make this work. - headers={"Accept-Encoding": "identity"}, - stream=True, - ) - resp.raise_for_status() - except requests.HTTPError as exc: - logger.critical( - "HTTP error %s while getting %s", exc.response.status_code, link, - ) - raise - - content_type = resp.headers.get('content-type', '') - filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') - if content_disposition: - filename = parse_content_disposition(content_disposition, filename) - ext = splitext(filename)[1] # type: Optional[str] - if not ext: - ext = mimetypes.guess_extension(content_type) - if ext: - filename += ext - if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: - filename += ext - file_path = os.path.join(temp_dir, filename) - with open(file_path, 'wb') as content_file: - _download_url(resp, link, content_file, hashes, progress_bar) - return file_path, content_type - - -def _check_download_dir(link, download_dir, hashes): - # type: (Link, str, Optional[Hashes]) -> Optional[str] - """ Check download_dir for previously downloaded file with correct hash - If a correct file is found return its path else None - """ - download_path = os.path.join(download_dir, link.filename) - - if not os.path.exists(download_path): - return None - - # If already downloaded, does its hash match? - logger.info('File was already downloaded %s', download_path) - if hashes: - try: - hashes.check_against_path(download_path) - except HashMismatch: - logger.warning( - 'Previously-downloaded file %s has bad hash. ' - 'Re-downloading.', - download_path - ) - os.unlink(download_path) - return None - return download_path diff --git a/pipenv/patched/notpip/_internal/index.py b/pipenv/patched/notpip/_internal/index.py deleted file mode 100644 index 0f212115a8..0000000000 --- a/pipenv/patched/notpip/_internal/index.py +++ /dev/null @@ -1,1028 +0,0 @@ -"""Routines related to PyPI, indexes""" - -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False - -from __future__ import absolute_import - -import logging -import re - -from pipenv.patched.notpip._vendor.packaging import specifiers -from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name -from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version - -from pipenv.patched.notpip._internal.exceptions import ( - BestVersionAlreadyInstalled, - DistributionNotFound, - InvalidWheelFilename, - UnsupportedWheel, -) -from pipenv.patched.notpip._internal.models.candidate import InstallationCandidate -from pipenv.patched.notpip._internal.models.format_control import FormatControl -from pipenv.patched.notpip._internal.models.link import Link -from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences -from pipenv.patched.notpip._internal.models.target_python import TargetPython -from pipenv.patched.notpip._internal.utils.filetypes import WHEEL_EXTENSION -from pipenv.patched.notpip._internal.utils.logging import indent_log -from pipenv.patched.notpip._internal.utils.misc import build_netloc -from pipenv.patched.notpip._internal.utils.packaging import check_requires_python -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.utils.unpacking import SUPPORTED_EXTENSIONS -from pipenv.patched.notpip._internal.utils.urls import url_to_path -from pipenv.patched.notpip._internal.wheel import Wheel - -if MYPY_CHECK_RUNNING: - from typing import ( - FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union, - ) - from pipenv.patched.notpip._vendor.packaging.version import _BaseVersion - from pipenv.patched.notpip._internal.collector import LinkCollector - from pipenv.patched.notpip._internal.models.search_scope import SearchScope - from pipenv.patched.notpip._internal.req import InstallRequirement - from pipenv.patched.notpip._internal.pep425tags import Pep425Tag - from pipenv.patched.notpip._internal.utils.hashes import Hashes - - BuildTag = Union[Tuple[()], Tuple[int, str]] - CandidateSortingKey = ( - Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] - ) - - -__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] - - -logger = logging.getLogger(__name__) - - -def _check_link_requires_python( - link, # type: Link - version_info, # type: Tuple[int, int, int] - ignore_requires_python=False, # type: bool -): - # type: (...) -> bool - """ - Return whether the given Python version is compatible with a link's - "Requires-Python" value. - - :param version_info: A 3-tuple of ints representing the Python - major-minor-micro version to check. - :param ignore_requires_python: Whether to ignore the "Requires-Python" - value if the given Python version isn't compatible. 
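# The compatibility test described above reduces to a specifier match; a
# minimal sketch using packaging directly, with example values:
from packaging.specifiers import SpecifierSet

version = ".".join(map(str, (3, 6, 9)))
compatible = version in SpecifierSet(">=3.5")  # True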
- """ - try: - is_compatible = check_requires_python( - link.requires_python, version_info=version_info, - ) - except specifiers.InvalidSpecifier: - logger.debug( - "Ignoring invalid Requires-Python (%r) for link: %s", - link.requires_python, link, - ) - else: - if not is_compatible: - version = '.'.join(map(str, version_info)) - if not ignore_requires_python: - logger.debug( - 'Link requires a different Python (%s not in: %r): %s', - version, link.requires_python, link, - ) - return False - - logger.debug( - 'Ignoring failed Requires-Python check (%s not in: %r) ' - 'for link: %s', - version, link.requires_python, link, - ) - - return True - - -class LinkEvaluator(object): - - """ - Responsible for evaluating links for a particular project. - """ - - _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - def __init__( - self, - project_name, # type: str - canonical_name, # type: str - formats, # type: FrozenSet - target_python, # type: TargetPython - allow_yanked, # type: bool - ignore_requires_python=None, # type: Optional[bool] - ignore_compatibility=None, # type: Optional[bool] - ): - # type: (...) -> None - """ - :param project_name: The user supplied package name. - :param canonical_name: The canonical package name. - :param formats: The formats allowed for this package. Should be a set - with 'binary' or 'source' or both in it. - :param target_python: The target Python interpreter to use when - evaluating link compatibility. This is used, for example, to - check wheel compatibility, as well as when checking the Python - version, e.g. the Python version embedded in a link filename - (or egg fragment) and against an HTML link's optional PEP 503 - "data-requires-python" attribute. - :param allow_yanked: Whether files marked as yanked (in the sense - of PEP 592) are permitted to be candidates for install. - :param ignore_requires_python: Whether to ignore incompatible - PEP 503 "data-requires-python" values in HTML links. Defaults - to False. - :param Optional[bool] ignore_compatibility: Whether to ignore - compatibility of python versions and allow all versions of packages. - """ - if ignore_requires_python is None: - ignore_requires_python = False - if ignore_compatibility is None: - ignore_compatibility = True - - self._allow_yanked = allow_yanked - self._canonical_name = canonical_name - self._ignore_requires_python = ignore_requires_python - self._formats = formats - self._target_python = target_python - self._ignore_compatibility = ignore_compatibility - - self.project_name = project_name - - def evaluate_link(self, link): - # type: (Link) -> Tuple[bool, Optional[Text]] - """ - Determine whether a link is a candidate for installation. - - :return: A tuple (is_candidate, result), where `result` is (1) a - version string if `is_candidate` is True, and (2) if - `is_candidate` is False, an optional string to log the reason - the link fails to qualify. - """ - version = None - if link.is_yanked and not self._allow_yanked: - reason = link.yanked_reason or '' - # Mark this as a unicode string to prevent "UnicodeEncodeError: - # 'ascii' codec can't encode character" in Python 2 when - # the reason contains non-ascii characters. 
- return (False, u'yanked for reason: {}'.format(reason)) - - if link.egg_fragment: - egg_info = link.egg_fragment - ext = link.ext - else: - egg_info, ext = link.splitext() - if not ext: - return (False, 'not a file') - if ext not in SUPPORTED_EXTENSIONS: - return (False, 'unsupported archive format: %s' % ext) - if "binary" not in self._formats and ext == WHEEL_EXTENSION and not self._ignore_compatibility: - reason = 'No binaries permitted for %s' % self.project_name - return (False, reason) - if "macosx10" in link.path and ext == '.zip' and not self._ignore_compatibility: - return (False, 'macosx10 one') - if ext == WHEEL_EXTENSION: - try: - wheel = Wheel(link.filename) - except InvalidWheelFilename: - return (False, 'invalid wheel filename') - if canonicalize_name(wheel.name) != self._canonical_name: - reason = 'wrong project name (not %s)' % self.project_name - return (False, reason) - - supported_tags = self._target_python.get_tags() - if not wheel.supported(supported_tags) and not self._ignore_compatibility: - # Include the wheel's tags in the reason string to - # simplify troubleshooting compatibility issues. - file_tags = wheel.get_formatted_file_tags() - reason = ( - "none of the wheel's tags match: {}".format( - ', '.join(file_tags) - ) - ) - return (False, reason) - - version = wheel.version - - # This should be up by the self.ok_binary check, but see issue 2700. - if "source" not in self._formats and ext != WHEEL_EXTENSION: - return (False, 'No sources permitted for %s' % self.project_name) - - if not version: - version = _extract_version_from_fragment( - egg_info, self._canonical_name, - ) - if not version: - return ( - False, 'Missing project version for %s' % self.project_name, - ) - - match = self._py_version_re.search(version) - if match: - version = version[:match.start()] - py_version = match.group(1) - if py_version != self._target_python.py_version: - return (False, 'Python version is incorrect') - - supports_python = _check_link_requires_python( - link, version_info=self._target_python.py_version_info, - ignore_requires_python=self._ignore_requires_python, - ) - if not supports_python and not self._ignore_compatibility: - # Return None for the reason text to suppress calling - # _log_skipped_link(). - return (False, None) - - logger.debug('Found link %s, version: %s', link, version) - - return (True, version) - - -def filter_unallowed_hashes( - candidates, # type: List[InstallationCandidate] - hashes, # type: Hashes - project_name, # type: str -): - # type: (...) -> List[InstallationCandidate] - """ - Filter out candidates whose hashes aren't allowed, and return a new - list of candidates. - - If at least one candidate has an allowed hash, then all candidates with - either an allowed hash or no hash specified are returned. Otherwise, - the given candidates are returned. - - Including the candidates with no hash specified when there is a match - allows a warning to be logged if there is a more preferred candidate - with no hash specified. Returning all candidates in the case of no - matches lets pip report the hash of the candidate that would otherwise - have been installed (e.g. permitting the user to more easily update - their requirements file with the desired hash). - """ - if not hashes: - logger.debug( - 'Given no hashes to check %s links for project %r: ' - 'discarding no candidates', - len(candidates), - project_name, - ) - # Make sure we're not returning back the given value. 
- return list(candidates) - - matches_or_no_digest = [] - # Collect the non-matches for logging purposes. - non_matches = [] - match_count = 0 - for candidate in candidates: - link = candidate.link - if not link.has_hash: - pass - elif link.is_hash_allowed(hashes=hashes): - match_count += 1 - else: - non_matches.append(candidate) - continue - - matches_or_no_digest.append(candidate) - - if match_count: - filtered = matches_or_no_digest - else: - # Make sure we're not returning back the given value. - filtered = list(candidates) - - if len(filtered) == len(candidates): - discard_message = 'discarding no candidates' - else: - discard_message = 'discarding {} non-matches:\n {}'.format( - len(non_matches), - '\n '.join(str(candidate.link) for candidate in non_matches) - ) - - logger.debug( - 'Checked %s links for project %r against %s hashes ' - '(%s matches, %s no digest): %s', - len(candidates), - project_name, - hashes.digest_count, - match_count, - len(matches_or_no_digest) - match_count, - discard_message - ) - - return filtered - - -class CandidatePreferences(object): - - """ - Encapsulates some of the preferences for filtering and sorting - InstallationCandidate objects. - """ - - def __init__( - self, - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - ): - # type: (...) -> None - """ - :param allow_all_prereleases: Whether to allow all pre-releases. - """ - self.allow_all_prereleases = allow_all_prereleases - self.prefer_binary = prefer_binary - - -class BestCandidateResult(object): - """A collection of candidates, returned by `PackageFinder.find_best_candidate`. - - This class is only intended to be instantiated by CandidateEvaluator's - `compute_best_candidate()` method. - """ - - def __init__( - self, - candidates, # type: List[InstallationCandidate] - applicable_candidates, # type: List[InstallationCandidate] - best_candidate, # type: Optional[InstallationCandidate] - ): - # type: (...) -> None - """ - :param candidates: A sequence of all available candidates found. - :param applicable_candidates: The applicable candidates. - :param best_candidate: The most preferred candidate found, or None - if no applicable candidates were found. - """ - assert set(applicable_candidates) <= set(candidates) - - if best_candidate is None: - assert not applicable_candidates - else: - assert best_candidate in applicable_candidates - - self._applicable_candidates = applicable_candidates - self._candidates = candidates - - self.best_candidate = best_candidate - - def iter_all(self): - # type: () -> Iterable[InstallationCandidate] - """Iterate through all candidates. - """ - return iter(self._candidates) - - def iter_applicable(self): - # type: () -> Iterable[InstallationCandidate] - """Iterate through the applicable candidates. - """ - return iter(self._applicable_candidates) - - -class CandidateEvaluator(object): - - """ - Responsible for filtering and sorting candidates for installation based - on what tags are valid. - """ - - @classmethod - def create( - cls, - project_name, # type: str - target_python=None, # type: Optional[TargetPython] - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> CandidateEvaluator - """Create a CandidateEvaluator object. - - :param target_python: The target Python interpreter to use when - checking compatibility. 
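# get_applicable_candidates further below filters versions through the
# specifier; a standalone illustration with example version strings:
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=1.0,!=1.3")
list(spec.filter(["0.9", "1.0", "1.3", "2.0a1"]))                    # ['1.0']
list(spec.filter(["0.9", "1.0", "1.3", "2.0a1"], prereleases=True))  # ['1.0', '2.0a1']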
If None (the default), a TargetPython - object will be constructed from the running Python. - :param specifier: An optional object implementing `filter` - (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable - versions. - :param hashes: An optional collection of allowed hashes. - """ - if target_python is None: - target_python = TargetPython() - if specifier is None: - specifier = specifiers.SpecifierSet() - - supported_tags = target_python.get_tags() - - return cls( - project_name=project_name, - supported_tags=supported_tags, - specifier=specifier, - prefer_binary=prefer_binary, - allow_all_prereleases=allow_all_prereleases, - hashes=hashes, - ) - - def __init__( - self, - project_name, # type: str - supported_tags, # type: List[Pep425Tag] - specifier, # type: specifiers.BaseSpecifier - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> None - """ - :param supported_tags: The PEP 425 tags supported by the target - Python in order of preference (most preferred first). - """ - self._allow_all_prereleases = allow_all_prereleases - self._hashes = hashes - self._prefer_binary = prefer_binary - self._project_name = project_name - self._specifier = specifier - self._supported_tags = supported_tags - - def get_applicable_candidates( - self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> List[InstallationCandidate] - """ - Return the applicable candidates from a list of candidates. - """ - # Using None infers from the specifier instead. - allow_prereleases = self._allow_all_prereleases or None - specifier = self._specifier - versions = { - str(v) for v in specifier.filter( - # We turn the version object into a str here because otherwise - # when we're debundled but setuptools isn't, Python will see - # packaging.version.Version and - # pkg_resources._vendor.packaging.version.Version as different - # types. This way we'll use a str as a common data interchange - # format. If we stop using the pkg_resources provided specifier - # and start using our own, we can drop the cast to str(). - (str(c.version) for c in candidates), - prereleases=allow_prereleases, - ) - } - - # Again, converting version to str to deal with debundling. - applicable_candidates = [ - c for c in candidates if str(c.version) in versions - ] - - return filter_unallowed_hashes( - candidates=applicable_candidates, - hashes=self._hashes, - project_name=self._project_name, - ) - - def _sort_key(self, candidate, ignore_compatibility=True): - # type: (InstallationCandidate, bool) -> CandidateSortingKey - """ - Function to pass as the `key` argument to a call to sorted() to sort - InstallationCandidates by preference. - - Returns a tuple such that tuples sorting as greater using Python's - default comparison operator are more preferred. - - The preference is as follows: - - First and foremost, candidates with allowed (matching) hashes are - always preferred over candidates without matching hashes. This is - because e.g. if the only candidate with an allowed hash is yanked, - we still want to use that candidate. - - Second, excepting hash considerations, candidates that have been - yanked (in the sense of PEP 592) are always less preferred than - candidates that haven't been yanked. Then: - - If not finding wheels, they are sorted by version only. - If finding wheels, then the sort order is by version, then: - 1. existing installs - 2. wheels ordered via Wheel.support_index_min(self._supported_tags) - 3. 
source archives - If prefer_binary was set, then all wheels are sorted above sources. - - Note: it was considered to embed this logic into the Link - comparison operators, but then different sdist links - with the same version, would have to be considered equal - """ - valid_tags = self._supported_tags - support_num = len(valid_tags) - build_tag = () # type: BuildTag - binary_preference = 0 - link = candidate.link - if link.is_wheel: - # can raise InvalidWheelFilename - wheel = Wheel(link.filename) - if not wheel.supported(valid_tags) and not ignore_compatibility: - raise UnsupportedWheel( - "%s is not a supported wheel for this platform. It " - "can't be sorted." % wheel.filename - ) - if self._prefer_binary: - binary_preference = 1 - tags = self.valid_tags if not ignore_compatibility else None - try: - pri = -(wheel.support_index_min(tags=tags)) - except TypeError: - pri = -(support_num) - if wheel.build_tag is not None: - match = re.match(r'^(\d+)(.*)$', wheel.build_tag) - build_tag_groups = match.groups() - build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) - else: # sdist - pri = -(support_num) - has_allowed_hash = int(link.is_hash_allowed(self._hashes)) - yank_value = -1 * int(link.is_yanked) # -1 for yanked. - return ( - has_allowed_hash, yank_value, binary_preference, candidate.version, - build_tag, pri, - ) - - def sort_best_candidate( - self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> Optional[InstallationCandidate] - """ - Return the best candidate per the instance's sort order, or None if - no candidate is acceptable. - """ - if not candidates: - return None - - best_candidate = max(candidates, key=self._sort_key) - - # Log a warning per PEP 592 if necessary before returning. - link = best_candidate.link - if link.is_yanked: - reason = link.yanked_reason or '' - msg = ( - # Mark this as a unicode string to prevent - # "UnicodeEncodeError: 'ascii' codec can't encode character" - # in Python 2 when the reason contains non-ascii characters. - u'The candidate selected for download or install is a ' - 'yanked version: {candidate}\n' - 'Reason for being yanked: {reason}' - ).format(candidate=best_candidate, reason=reason) - logger.warning(msg) - - return best_candidate - - def compute_best_candidate( - self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> BestCandidateResult - """ - Compute and return a `BestCandidateResult` instance. - """ - applicable_candidates = self.get_applicable_candidates(candidates) - - best_candidate = self.sort_best_candidate(applicable_candidates) - - return BestCandidateResult( - candidates, - applicable_candidates=applicable_candidates, - best_candidate=best_candidate, - ) - - -class PackageFinder(object): - """This finds packages. - - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links. - """ - - def __init__( - self, - link_collector, # type: LinkCollector - target_python, # type: TargetPython - allow_yanked, # type: bool - format_control=None, # type: Optional[FormatControl] - candidate_prefs=None, # type: CandidatePreferences - ignore_requires_python=None, # type: Optional[bool] - ignore_compatibility=None, # type: Optional[bool] - ): - # type: (...) -> None - """ - This constructor is primarily meant to be used by the create() class - method and from tests. 
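# The preference ordering in CandidateEvaluator._sort_key above works
# because Python compares tuples element-wise; a toy sketch with stand-in
# values (real keys hold Version and build-tag objects):
key_hashed_old = (1, 0, 0, (1, 2), (), -10)    # allowed hash, older version
key_unhashed_new = (0, 0, 0, (2, 0), (), -1)   # newer, but hash not allowed
max([key_hashed_old, key_unhashed_new]) == key_hashed_old  # True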
- - :param format_control: A FormatControl object, used to control - the selection of source packages / binary packages when consulting - the index and links. - :param candidate_prefs: Options to use when creating a - CandidateEvaluator object. - """ - if candidate_prefs is None: - candidate_prefs = CandidatePreferences() - if ignore_compatibility is None: - ignore_compatibility = False - - format_control = format_control or FormatControl(set(), set()) - - self._allow_yanked = allow_yanked - self._candidate_prefs = candidate_prefs - self._ignore_requires_python = ignore_requires_python - self._link_collector = link_collector - self._target_python = target_python - self._ignore_compatibility = ignore_compatibility - - self.format_control = format_control - - # These are boring links that have already been logged somehow. - self._logged_links = set() # type: Set[Link] - - # Kenneth's Hack - self.extra = None - - # Don't include an allow_yanked default value to make sure each call - # site considers whether yanked releases are allowed. This also causes - # that decision to be made explicit in the calling code, which helps - # people when reading the code. - @classmethod - def create( - cls, - link_collector, # type: LinkCollector - selection_prefs, # type: SelectionPreferences - target_python=None, # type: Optional[TargetPython] - ): - # type: (...) -> PackageFinder - """Create a PackageFinder. - - :param selection_prefs: The candidate selection preferences, as a - SelectionPreferences object. - :param target_python: The target Python interpreter to use when - checking compatibility. If None (the default), a TargetPython - object will be constructed from the running Python. - """ - if target_python is None: - target_python = TargetPython() - - candidate_prefs = CandidatePreferences( - prefer_binary=selection_prefs.prefer_binary, - allow_all_prereleases=selection_prefs.allow_all_prereleases, - ) - - return cls( - candidate_prefs=candidate_prefs, - link_collector=link_collector, - target_python=target_python, - allow_yanked=selection_prefs.allow_yanked, - format_control=selection_prefs.format_control, - ignore_requires_python=selection_prefs.ignore_requires_python, - ) - - @staticmethod - def get_extras_links(links): - requires = [] - extras = {} - - current_list = requires - - for link in links: - if not link: - current_list = requires - if link.startswith('['): - current_list = [] - extras[link[1:-1]] = current_list - else: - current_list.append(link) - return extras - - @property - def search_scope(self): - # type: () -> SearchScope - return self._link_collector.search_scope - - @search_scope.setter - def search_scope(self, search_scope): - # type: (SearchScope) -> None - self._link_collector.search_scope = search_scope - - @property - def find_links(self): - # type: () -> List[str] - return self._link_collector.find_links - - @property - def index_urls(self): - # type: () -> List[str] - return self.search_scope.index_urls - - @property - def trusted_hosts(self): - # type: () -> Iterable[str] - for host_port in self._link_collector.session.pip_trusted_origins: - yield build_netloc(*host_port) - - @property - def allow_all_prereleases(self): - # type: () -> bool - return self._candidate_prefs.allow_all_prereleases - - def set_allow_all_prereleases(self): - # type: () -> None - self._candidate_prefs.allow_all_prereleases = True - - def make_link_evaluator(self, project_name): - # type: (str) -> LinkEvaluator - canonical_name = canonicalize_name(project_name) - formats = 
self.format_control.get_allowed_formats(canonical_name) - - return LinkEvaluator( - project_name=project_name, - canonical_name=canonical_name, - formats=formats, - target_python=self._target_python, - allow_yanked=self._allow_yanked, - ignore_requires_python=self._ignore_requires_python, - ignore_compatibility=self._ignore_compatibility - ) - - def _sort_links(self, links): - # type: (Iterable[Link]) -> List[Link] - """ - Returns elements of links in order, non-egg links first, egg links - second, while eliminating duplicates - """ - eggs, no_eggs = [], [] - seen = set() # type: Set[Link] - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _log_skipped_link(self, link, reason): - # type: (Link, Text) -> None - if link not in self._logged_links: - # Mark this as a unicode string to prevent "UnicodeEncodeError: - # 'ascii' codec can't encode character" in Python 2 when - # the reason contains non-ascii characters. - # Also, put the link at the end so the reason is more visible - # and because the link string is usually very long. - logger.debug(u'Skipping link: %s: %s', reason, link) - self._logged_links.add(link) - - def get_install_candidate(self, link_evaluator, link): - # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] - """ - If the link is a candidate for install, convert it to an - InstallationCandidate and return it. Otherwise, return None. - """ - is_candidate, result = link_evaluator.evaluate_link(link) - if not is_candidate: - if result: - self._log_skipped_link(link, reason=result) - return None - - return InstallationCandidate( - project=link_evaluator.project_name, - link=link, - # Convert the Text result to str since InstallationCandidate - # accepts str. - version=str(result), - requires_python=getattr(link, "requires_python", None) - ) - - def evaluate_links(self, link_evaluator, links): - # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] - """ - Convert links that are candidates to InstallationCandidate objects. - """ - candidates = [] - for link in self._sort_links(links): - candidate = self.get_install_candidate(link_evaluator, link) - if candidate is not None: - candidates.append(candidate) - - return candidates - - def find_all_candidates(self, project_name): - # type: (str) -> List[InstallationCandidate] - """Find all available InstallationCandidate for project_name - - This checks index_urls and find_links. - All versions found are returned as an InstallationCandidate list. - - See LinkEvaluator.evaluate_link() for details on which files - are accepted. 
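# A hedged usage sketch of the two query methods defined here, assuming a
# PackageFinder instance `finder` built via PackageFinder.create() and an
# example project name:
candidates = finder.find_all_candidates("requests")
best = finder.find_best_candidate("requests").best_candidate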
- """ - collected_links = self._link_collector.collect_links(project_name) - - link_evaluator = self.make_link_evaluator(project_name) - - find_links_versions = self.evaluate_links( - link_evaluator, - links=collected_links.find_links, - ) - - page_versions = [] - for page_url, page_links in collected_links.pages.items(): - logger.debug('Analyzing links from page %s', page_url) - with indent_log(): - new_versions = self.evaluate_links( - link_evaluator, - links=page_links, - ) - page_versions.extend(new_versions) - - file_versions = self.evaluate_links( - link_evaluator, - links=collected_links.files, - ) - if file_versions: - file_versions.sort(reverse=True) - logger.debug( - 'Local files found: %s', - ', '.join([ - url_to_path(candidate.link.url) - for candidate in file_versions - ]) - ) - - # This is an intentional priority ordering - return file_versions + find_links_versions + page_versions - - def make_candidate_evaluator( - self, - project_name, # type: str - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> CandidateEvaluator - """Create a CandidateEvaluator object to use. - """ - candidate_prefs = self._candidate_prefs - return CandidateEvaluator.create( - project_name=project_name, - target_python=self._target_python, - prefer_binary=candidate_prefs.prefer_binary, - allow_all_prereleases=candidate_prefs.allow_all_prereleases, - specifier=specifier, - hashes=hashes, - ) - - def find_best_candidate( - self, - project_name, # type: str - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> BestCandidateResult - """Find matches for the given project and specifier. - - :param specifier: An optional object implementing `filter` - (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable - versions. - - :return: A `BestCandidateResult` instance. - """ - candidates = self.find_all_candidates(project_name) - candidate_evaluator = self.make_candidate_evaluator( - project_name=project_name, - specifier=specifier, - hashes=hashes, - ) - return candidate_evaluator.compute_best_candidate(candidates) - - def find_requirement(self, req, upgrade): - # type: (InstallRequirement, bool) -> Optional[Link] - """Try to find a Link matching req - - Expects req, an InstallRequirement and upgrade, a boolean - Returns a Link if found, - Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise - """ - hashes = req.hashes(trust_internet=False) - best_candidate_result = self.find_best_candidate( - req.name, specifier=req.specifier, hashes=hashes, - ) - best_candidate = best_candidate_result.best_candidate - - installed_version = None # type: Optional[_BaseVersion] - if req.satisfied_by is not None: - installed_version = parse_version(req.satisfied_by.version) - - def _format_versions(cand_iter): - # This repeated parse_version and str() conversion is needed to - # handle different vendoring sources from pipenv.patched.notpip and pkg_resources. - # If we stop using the pkg_resources provided specifier and start - # using our own, we can drop the cast to str(). 
- return ", ".join(sorted( - {str(c.version) for c in cand_iter}, - key=parse_version, - )) or "none" - - if installed_version is None and best_candidate is None: - logger.critical( - 'Could not find a version that satisfies the requirement %s ' - '(from versions: %s)', - req, - _format_versions(best_candidate_result.iter_all()), - ) - - raise DistributionNotFound( - 'No matching distribution found for %s' % req - ) - - best_installed = False - if installed_version and ( - best_candidate is None or - best_candidate.version <= installed_version): - best_installed = True - - if not upgrade and installed_version is not None: - if best_installed: - logger.debug( - 'Existing installed version (%s) is most up-to-date and ' - 'satisfies requirement', - installed_version, - ) - else: - logger.debug( - 'Existing installed version (%s) satisfies requirement ' - '(most up-to-date version is %s)', - installed_version, - best_candidate.version, - ) - return None - - if best_installed: - # We have an existing version, and its the best version - logger.debug( - 'Installed version (%s) is most up-to-date (past versions: ' - '%s)', - installed_version, - _format_versions(best_candidate_result.iter_applicable()), - ) - raise BestVersionAlreadyInstalled - - logger.debug( - 'Using version %s (newest of versions: %s)', - best_candidate.version, - _format_versions(best_candidate_result.iter_applicable()), - ) - return best_candidate.link - - -def _find_name_version_sep(fragment, canonical_name): - # type: (str, str) -> int - """Find the separator's index based on the package's canonical name. - - :param fragment: A + filename "fragment" (stem) or - egg fragment. - :param canonical_name: The package's canonical name. - - This function is needed since the canonicalized name does not necessarily - have the same length as the egg info's name part. An example:: - - >>> fragment = 'foo__bar-1.0' - >>> canonical_name = 'foo-bar' - >>> _find_name_version_sep(fragment, canonical_name) - 8 - """ - # Project name and version must be separated by one single dash. Find all - # occurrences of dashes; if the string in front of it matches the canonical - # name, this is the one separating the name and version parts. - for i, c in enumerate(fragment): - if c != "-": - continue - if canonicalize_name(fragment[:i]) == canonical_name: - return i - raise ValueError("{} does not match {}".format(fragment, canonical_name)) - - -def _extract_version_from_fragment(fragment, canonical_name): - # type: (str, str) -> Optional[str] - """Parse the version string from a + filename - "fragment" (stem) or egg fragment. - - :param fragment: The string to parse. E.g. foo-2.1 - :param canonical_name: The canonicalized name of the package this - belongs to. - """ - try: - version_start = _find_name_version_sep(fragment, canonical_name) + 1 - except ValueError: - return None - version = fragment[version_start:] - if not version: - return None - return version diff --git a/pipenv/patched/notpip/_internal/operations/generate_metadata.py b/pipenv/patched/notpip/_internal/operations/generate_metadata.py deleted file mode 100644 index dd30f5536a..0000000000 --- a/pipenv/patched/notpip/_internal/operations/generate_metadata.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Metadata generation logic for source distributions. 
-""" - -import logging -import os - -from pipenv.patched.notpip._internal.exceptions import InstallationError -from pipenv.patched.notpip._internal.utils.misc import ensure_dir -from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_shim_args -from pipenv.patched.notpip._internal.utils.subprocess import call_subprocess -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.vcs import vcs - -if MYPY_CHECK_RUNNING: - from typing import Callable, List - from pipenv.patched.notpip._internal.req.req_install import InstallRequirement - -logger = logging.getLogger(__name__) - - -def get_metadata_generator(install_req): - # type: (InstallRequirement) -> Callable[[InstallRequirement], str] - """Return a callable metadata generator for this InstallRequirement. - - A metadata generator takes an InstallRequirement (install_req) as an input, - generates metadata via the appropriate process for that install_req and - returns the generated metadata directory. - """ - if not install_req.use_pep517: - return _generate_metadata_legacy - - return _generate_metadata - - -def _find_egg_info(source_directory, is_editable): - # type: (str, bool) -> str - """Find an .egg-info in `source_directory`, based on `is_editable`. - """ - - def looks_like_virtual_env(path): - # type: (str) -> bool - return ( - os.path.lexists(os.path.join(path, 'bin', 'python')) or - os.path.exists(os.path.join(path, 'Scripts', 'Python.exe')) - ) - - def locate_editable_egg_info(base): - # type: (str) -> List[str] - candidates = [] # type: List[str] - for root, dirs, files in os.walk(base): - for dir_ in vcs.dirnames: - if dir_ in dirs: - dirs.remove(dir_) - # Iterate over a copy of ``dirs``, since mutating - # a list while iterating over it can cause trouble. - # (See https://github.com/pypa/pip/pull/462.) - for dir_ in list(dirs): - if looks_like_virtual_env(os.path.join(root, dir_)): - dirs.remove(dir_) - # Also don't search through tests - elif dir_ == 'test' or dir_ == 'tests': - dirs.remove(dir_) - candidates.extend(os.path.join(root, dir_) for dir_ in dirs) - return [f for f in candidates if f.endswith('.egg-info')] - - def depth_of_directory(dir_): - # type: (str) -> int - return ( - dir_.count(os.path.sep) + - (os.path.altsep and dir_.count(os.path.altsep) or 0) - ) - - base = source_directory - if is_editable: - filenames = locate_editable_egg_info(base) - else: - base = os.path.join(base, 'pip-egg-info') - filenames = os.listdir(base) - - if not filenames: - raise InstallationError( - "Files/directories not found in %s" % base - ) - - # If we have more than one match, we pick the toplevel one. This - # can easily be the case if there is a dist folder which contains - # an extracted tarball for testing purposes. - if len(filenames) > 1: - filenames.sort(key=depth_of_directory) - - return os.path.join(base, filenames[0]) - - -def _generate_metadata_legacy(install_req): - # type: (InstallRequirement) -> str - req_details_str = install_req.name or "from {}".format(install_req.link) - logger.debug( - 'Running setup.py (path:%s) egg_info for package %s', - install_req.setup_py_path, req_details_str, - ) - - # Compose arguments for subprocess call - base_cmd = make_setuptools_shim_args(install_req.setup_py_path) - if install_req.isolated: - base_cmd += ["--no-user-cfg"] - - # For non-editable installs, don't put the .egg-info files at the root, - # to avoid confusion due to the source code being considered an installed - # egg. 
- egg_base_option = [] # type: List[str] - if not install_req.editable: - egg_info_dir = os.path.join( - install_req.unpacked_source_directory, 'pip-egg-info', - ) - egg_base_option = ['--egg-base', egg_info_dir] - - # setuptools complains if the target directory does not exist. - ensure_dir(egg_info_dir) - - with install_req.build_env: - call_subprocess( - base_cmd + ["egg_info"] + egg_base_option, - cwd=install_req.unpacked_source_directory, - command_desc='python setup.py egg_info', - ) - - # Return the .egg-info directory. - return _find_egg_info( - install_req.unpacked_source_directory, - install_req.editable, - ) - - -def _generate_metadata(install_req): - # type: (InstallRequirement) -> str - return install_req.prepare_pep517_metadata() diff --git a/pipenv/patched/notpip/_internal/wheel.py b/pipenv/patched/notpip/_internal/wheel.py deleted file mode 100644 index d4c155b481..0000000000 --- a/pipenv/patched/notpip/_internal/wheel.py +++ /dev/null @@ -1,1181 +0,0 @@ -""" -Support for installing and building the "wheel" binary package format. -""" - -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False -# mypy: disallow-untyped-defs=False - -from __future__ import absolute_import - -import collections -import compileall -import csv -import hashlib -import logging -import os.path -import re -import shutil -import stat -import sys -import warnings -from base64 import urlsafe_b64encode -from email.parser import Parser - -from pipenv.patched.notpip._vendor import pkg_resources -from pipenv.patched.notpip._vendor.distlib.scripts import ScriptMaker -from pipenv.patched.notpip._vendor.distlib.util import get_export_entry -from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name -from pipenv.patched.notpip._vendor.six import StringIO - -from pipenv.patched.notpip._internal import pep425tags -from pipenv.patched.notpip._internal.exceptions import ( - InstallationError, - InvalidWheelFilename, - UnsupportedWheel, -) -from pipenv.patched.notpip._internal.locations import distutils_scheme, get_major_minor_version -from pipenv.patched.notpip._internal.models.link import Link -from pipenv.patched.notpip._internal.utils.logging import indent_log -from pipenv.patched.notpip._internal.utils.marker_files import has_delete_marker_file -from pipenv.patched.notpip._internal.utils.misc import captured_stdout, ensure_dir, read_chunks -from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_shim_args -from pipenv.patched.notpip._internal.utils.subprocess import ( - LOG_DIVIDER, - call_subprocess, - format_command_args, - runner_with_spinner_message, -) -from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory -from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING -from pipenv.patched.notpip._internal.utils.ui import open_spinner -from pipenv.patched.notpip._internal.utils.unpacking import unpack_file -from pipenv.patched.notpip._internal.utils.urls import path_to_url - -if MYPY_CHECK_RUNNING: - from typing import ( - Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, - Iterable, Callable, Set, - ) - from pipenv.patched.notpip._vendor.packaging.requirements import Requirement - from pipenv.patched.notpip._internal.req.req_install import InstallRequirement - from pipenv.patched.notpip._internal.operations.prepare import ( - RequirementPreparer - ) - from pipenv.patched.notpip._internal.cache import WheelCache - from pipenv.patched.notpip._internal.pep425tags import 
Pep425Tag - - InstalledCSVRow = Tuple[str, ...] - - BinaryAllowedPredicate = Callable[[InstallRequirement], bool] - - -VERSION_COMPATIBLE = (1, 0) - - -logger = logging.getLogger(__name__) - - -def normpath(src, p): - return os.path.relpath(src, p).replace(os.path.sep, '/') - - -def hash_file(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[Any, int] - """Return (hash, length) for path using hashlib.sha256()""" - h = hashlib.sha256() - length = 0 - with open(path, 'rb') as f: - for block in read_chunks(f, size=blocksize): - length += len(block) - h.update(block) - return (h, length) # type: ignore - - -def rehash(path, blocksize=1 << 20): - # type: (str, int) -> Tuple[str, str] - """Return (encoded_digest, length) for path using hashlib.sha256()""" - h, length = hash_file(path, blocksize) - digest = 'sha256=' + urlsafe_b64encode( - h.digest() - ).decode('latin1').rstrip('=') - # unicode/str python2 issues - return (digest, str(length)) # type: ignore - - -def open_for_csv(name, mode): - # type: (str, Text) -> IO - if sys.version_info[0] < 3: - nl = {} # type: Dict[str, Any] - bin = 'b' - else: - nl = {'newline': ''} # type: Dict[str, Any] - bin = '' - return open(name, mode + bin, **nl) - - -def replace_python_tag(wheelname, new_tag): - # type: (str, str) -> str - """Replace the Python tag in a wheel file name with a new value. - """ - parts = wheelname.split('-') - parts[-3] = new_tag - return '-'.join(parts) - - -def fix_script(path): - # type: (str) -> Optional[bool] - """Replace #!python with #!/path/to/python - Return True if file was changed.""" - # XXX RECORD hashes will need to be updated - if os.path.isfile(path): - with open(path, 'rb') as script: - firstline = script.readline() - if not firstline.startswith(b'#!python'): - return False - exename = sys.executable.encode(sys.getfilesystemencoding()) - firstline = b'#!' + exename + os.linesep.encode("ascii") - rest = script.read() - with open(path, 'wb') as script: - script.write(firstline) - script.write(rest) - return True - return None - - -dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) - \.dist-info$""", re.VERBOSE) - - -def root_is_purelib(name, wheeldir): - # type: (str, str) -> bool - """ - Return True if the extracted wheel in wheeldir should go into purelib. - """ - name_folded = name.replace("-", "_") - for item in os.listdir(wheeldir): - match = dist_info_re.match(item) - if match and match.group('name') == name_folded: - with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: - for line in wheel: - line = line.lower().rstrip() - if line == "root-is-purelib: true": - return True - return False - - -def get_entrypoints(filename): - # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] - if not os.path.exists(filename): - return {}, {} - - # This is done because you can pass a string to entry_points wrappers which - # means that they may or may not be valid INI files. The attempt here is to - # strip leading and trailing whitespace in order to make them valid INI - # files.
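# rehash() above emits the digest format that PEP 376/427 RECORD files
# expect: urlsafe base64 of the raw sha256 digest with padding stripped.
# A standalone sketch (real code streams the file in chunks):
import hashlib
from base64 import urlsafe_b64encode

def record_digest(data):
    raw = hashlib.sha256(data).digest()
    return 'sha256=' + urlsafe_b64encode(raw).decode('latin1').rstrip('=')

assert record_digest(b'') == 'sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU'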
- with open(filename) as fp: - data = StringIO() - for line in fp: - data.write(line.strip()) - data.write("\n") - data.seek(0) - - # get the entry points and then the script names - entry_points = pkg_resources.EntryPoint.parse_map(data) - console = entry_points.get('console_scripts', {}) - gui = entry_points.get('gui_scripts', {}) - - def _split_ep(s): - """get the string representation of EntryPoint, remove space and split - on '='""" - return str(s).replace(" ", "").split("=") - - # convert the EntryPoint objects into strings with module:function - console = dict(_split_ep(v) for v in console.values()) - gui = dict(_split_ep(v) for v in gui.values()) - return console, gui - - -def message_about_scripts_not_on_PATH(scripts): - # type: (Sequence[str]) -> Optional[str] - """Determine if any scripts are not on PATH and format a warning. - - Returns a warning message if one or more scripts are not on PATH, - otherwise None. - """ - if not scripts: - return None - - # Group scripts by the path they were installed in - grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] - for destfile in scripts: - parent_dir = os.path.dirname(destfile) - script_name = os.path.basename(destfile) - grouped_by_dir[parent_dir].add(script_name) - - # We don't want to warn for directories that are on PATH. - not_warn_dirs = [ - os.path.normcase(i).rstrip(os.sep) for i in - os.environ.get("PATH", "").split(os.pathsep) - ] - # If an executable sits with sys.executable, we don't warn for it. - # This covers the case of venv invocations without activating the venv. - not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) - warn_for = { - parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() - if os.path.normcase(parent_dir) not in not_warn_dirs - } # type: Dict[str, Set[str]] - if not warn_for: - return None - - # Format a message - msg_lines = [] - for parent_dir, dir_scripts in warn_for.items(): - sorted_scripts = sorted(dir_scripts) # type: List[str] - if len(sorted_scripts) == 1: - start_text = "script {} is".format(sorted_scripts[0]) - else: - start_text = "scripts {} are".format( - ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] - ) - - msg_lines.append( - "The {} installed in '{}' which is not on PATH." - .format(start_text, parent_dir) - ) - - last_line_fmt = ( - "Consider adding {} to PATH or, if you prefer " - "to suppress this warning, use --no-warn-script-location." - ) - if len(msg_lines) == 1: - msg_lines.append(last_line_fmt.format("this directory")) - else: - msg_lines.append(last_line_fmt.format("these directories")) - - # Returns the formatted multiline message - return "\n".join(msg_lines) - - -def sorted_outrows(outrows): - # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] - """ - Return the given rows of a RECORD file in sorted order. - - Each row is a 3-tuple (path, hash, size) and corresponds to a record of - a RECORD file (see PEP 376 and PEP 427 for details). For the rows - passed to this function, the size can be an integer as an int or string, - or the empty string. - """ - # Normally, there should only be one row per path, in which case the - # second and third elements don't come into play when sorting. - # However, in cases in the wild where a path might happen to occur twice, - # we don't want the sort operation to trigger an error (but still want - # determinism). Since the third element can be an int or string, we - # coerce each element to a string to avoid a TypeError in this case. 
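# The failure mode motivating that str() coercion is easy to reproduce:
# on Python 3, comparing an int size with a str size raises TypeError,
# so duplicate paths with mixed size types would crash a plain sort.
rows = [('pkg/a.py', 'sha256=x', 10), ('pkg/a.py', 'sha256=x', '')]
try:
    sorted(rows)
except TypeError:
    pass  # '<' not supported between instances of 'str' and 'int'
safe = sorted(rows, key=lambda row: tuple(str(x) for x in row))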
- # For additional background, see-- - # https://github.com/pypa/pip/issues/5868 - return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) - - -def get_csv_rows_for_installed( - old_csv_rows, # type: Iterable[List[str]] - installed, # type: Dict[str, str] - changed, # type: set - generated, # type: List[str] - lib_dir, # type: str -): - # type: (...) -> List[InstalledCSVRow] - """ - :param installed: A map from archive RECORD path to installation RECORD - path. - """ - installed_rows = [] # type: List[InstalledCSVRow] - for row in old_csv_rows: - if len(row) > 3: - logger.warning( - 'RECORD line has more than three elements: {}'.format(row) - ) - # Make a copy because we are mutating the row. - row = list(row) - old_path = row[0] - new_path = installed.pop(old_path, old_path) - row[0] = new_path - if new_path in changed: - digest, length = rehash(new_path) - row[1] = digest - row[2] = length - installed_rows.append(tuple(row)) - for f in generated: - digest, length = rehash(f) - installed_rows.append((normpath(f, lib_dir), digest, str(length))) - for f in installed: - installed_rows.append((installed[f], '', '')) - return installed_rows - - -class MissingCallableSuffix(Exception): - pass - - -def _raise_for_invalid_entrypoint(specification): - entry = get_export_entry(specification) - if entry is not None and entry.suffix is None: - raise MissingCallableSuffix(str(entry)) - - -class PipScriptMaker(ScriptMaker): - def make(self, specification, options=None): - _raise_for_invalid_entrypoint(specification) - return super(PipScriptMaker, self).make(specification, options) - - -def move_wheel_files( - name, # type: str - req, # type: Requirement - wheeldir, # type: str - user=False, # type: bool - home=None, # type: Optional[str] - root=None, # type: Optional[str] - pycompile=True, # type: bool - scheme=None, # type: Optional[Mapping[str, str]] - isolated=False, # type: bool - prefix=None, # type: Optional[str] - warn_script_location=True # type: bool -): - # type: (...) -> None - """Install a wheel""" - # TODO: Investigate and break this up. - # TODO: Look into moving this into a dedicated class for representing an - # installation. - - if not scheme: - scheme = distutils_scheme( - name, user=user, home=home, root=root, isolated=isolated, - prefix=prefix, - ) - - if root_is_purelib(name, wheeldir): - lib_dir = scheme['purelib'] - else: - lib_dir = scheme['platlib'] - - info_dir = [] # type: List[str] - data_dirs = [] - source = wheeldir.rstrip(os.path.sep) + os.path.sep - - # Record details of the files moved - # installed = files copied from the wheel to the destination - # changed = files changed while installing (scripts #! 
line typically) - # generated = files newly generated during the install (script wrappers) - installed = {} # type: Dict[str, str] - changed = set() - generated = [] # type: List[str] - - # Compile all of the pyc files that we're going to be installing - if pycompile: - with captured_stdout() as stdout: - with warnings.catch_warnings(): - warnings.filterwarnings('ignore') - compileall.compile_dir(source, force=True, quiet=True) - logger.debug(stdout.getvalue()) - - def record_installed(srcfile, destfile, modified=False): - """Map archive RECORD paths to installation RECORD paths.""" - oldpath = normpath(srcfile, wheeldir) - newpath = normpath(destfile, lib_dir) - installed[oldpath] = newpath - if modified: - changed.add(destfile) - - def clobber(source, dest, is_base, fixer=None, filter=None): - ensure_dir(dest) # common for the 'include' path - - for dir, subdirs, files in os.walk(source): - basedir = dir[len(source):].lstrip(os.path.sep) - destdir = os.path.join(dest, basedir) - if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): - continue - for s in subdirs: - destsubdir = os.path.join(dest, basedir, s) - if is_base and basedir == '' and destsubdir.endswith('.data'): - data_dirs.append(s) - continue - elif (is_base and - s.endswith('.dist-info') and - canonicalize_name(s).startswith( - canonicalize_name(req.name))): - assert not info_dir, ('Multiple .dist-info directories: ' + - destsubdir + ', ' + - ', '.join(info_dir)) - info_dir.append(destsubdir) - for f in files: - # Skip unwanted files - if filter and filter(f): - continue - srcfile = os.path.join(dir, f) - destfile = os.path.join(dest, basedir, f) - # directory creation is lazy and after the file filtering above - # to ensure we don't install empty dirs; empty dirs can't be - # uninstalled. - ensure_dir(destdir) - - # copyfile (called below) truncates the destination if it - # exists and then writes the new contents. This is fine in most - # cases, but can cause a segfault if pip has loaded a shared - # object (e.g. from pyopenssl through its vendored urllib3) - # Since the shared object is mmap'd an attempt to call a - # symbol in it will then cause a segfault. Unlinking the file - # allows writing of new contents while allowing the process to - # continue to use the old copy. - if os.path.exists(destfile): - os.unlink(destfile) - - # We use copyfile (not move, copy, or copy2) to be extra sure - # that we are not moving directories over (copyfile fails for - # directories) as well as to ensure that we are not copying - # over any metadata because we want more control over what - # metadata we actually copy over. - shutil.copyfile(srcfile, destfile) - - # Copy over the metadata for the file, currently this only - # includes the atime and mtime. - st = os.stat(srcfile) - if hasattr(os, "utime"): - os.utime(destfile, (st.st_atime, st.st_mtime)) - - # If our file is executable, then make our destination file - # executable. 
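# The copy discipline in clobber() above -- unlink first, copyfile rather
# than move/copy2, then restore atime/mtime -- looks like this in
# isolation; the paths are hypothetical:
import os
import shutil

def install_file(srcfile, destfile):
    if os.path.exists(destfile):
        os.unlink(destfile)             # avoid truncating an mmap'd .so
    shutil.copyfile(srcfile, destfile)  # contents only, no metadata
    st = os.stat(srcfile)
    os.utime(destfile, (st.st_atime, st.st_mtime))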
- if os.access(srcfile, os.X_OK): - st = os.stat(srcfile) - permissions = ( - st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH - ) - os.chmod(destfile, permissions) - - changed = False - if fixer: - changed = fixer(destfile) - record_installed(srcfile, destfile, changed) - - clobber(source, lib_dir, True) - - assert info_dir, "%s .dist-info directory not found" % req - - # Get the defined entry points - ep_file = os.path.join(info_dir[0], 'entry_points.txt') - console, gui = get_entrypoints(ep_file) - - def is_entrypoint_wrapper(name): - # EP, EP.exe and EP-script.py are scripts generated for - # entry point EP by setuptools - if name.lower().endswith('.exe'): - matchname = name[:-4] - elif name.lower().endswith('-script.py'): - matchname = name[:-10] - elif name.lower().endswith(".pya"): - matchname = name[:-4] - else: - matchname = name - # Ignore setuptools-generated scripts - return (matchname in console or matchname in gui) - - for datadir in data_dirs: - fixer = None - filter = None - for subdir in os.listdir(os.path.join(wheeldir, datadir)): - fixer = None - if subdir == 'scripts': - fixer = fix_script - filter = is_entrypoint_wrapper - source = os.path.join(wheeldir, datadir, subdir) - dest = scheme[subdir] - clobber(source, dest, False, fixer=fixer, filter=filter) - - maker = PipScriptMaker(None, scheme['scripts']) - - # Ensure old scripts are overwritten. - # See https://github.com/pypa/pip/issues/1800 - maker.clobber = True - - # Ensure we don't generate any variants for scripts because this is almost - # never what somebody wants. - # See https://bitbucket.org/pypa/distlib/issue/35/ - maker.variants = {''} - - # This is required because otherwise distlib creates scripts that are not - # executable. - # See https://bitbucket.org/pypa/distlib/issue/32/ - maker.set_mode = True - - scripts_to_generate = [] - - # Special case pip and setuptools to generate versioned wrappers - # - # The issue is that some projects (specifically, pip and setuptools) use - # code in setup.py to create "versioned" entry points - pip2.7 on Python - # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into - # the wheel metadata at build time, and so if the wheel is installed with - # a *different* version of Python the entry points will be wrong. The - # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. - # In the meantime, projects using versioned entry points will either have - # incorrect versioned entry points, or they will not be able to distribute - # "universal" wheels (i.e., they will need a wheel per Python version). - # - # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we - # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. - # - # To add the level of hack in this section of code, in order to support - # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment - # variable which will control which version scripts get installed. - # - # ENSUREPIP_OPTIONS=altinstall - # - Only pipX.Y and easy_install-X.Y will be generated and installed - # ENSUREPIP_OPTIONS=install - # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. 
Note - # that this option is technically if ENSUREPIP_OPTIONS is set and is - # not altinstall - # DEFAULT - # - The default behavior is to install pip, pipX, pipX.Y, easy_install - # and easy_install-X.Y. - pip_script = console.pop('pip', None) - if pip_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append('pip = ' + pip_script) - - if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - scripts_to_generate.append( - 'pip%s = %s' % (sys.version_info[0], pip_script) - ) - - scripts_to_generate.append( - 'pip%s = %s' % (get_major_minor_version(), pip_script) - ) - # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] - for k in pip_ep: - del console[k] - easy_install_script = console.pop('easy_install', None) - if easy_install_script: - if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append( - 'easy_install = ' + easy_install_script - ) - - scripts_to_generate.append( - 'easy_install-%s = %s' % ( - get_major_minor_version(), easy_install_script - ) - ) - # Delete any other versioned easy_install entry points - easy_install_ep = [ - k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) - ] - for k in easy_install_ep: - del console[k] - - # Generate the console and GUI entry points specified in the wheel - scripts_to_generate.extend( - '%s = %s' % kv for kv in console.items() - ) - - gui_scripts_to_generate = [ - '%s = %s' % kv for kv in gui.items() - ] - - generated_console_scripts = [] # type: List[str] - - try: - generated_console_scripts = maker.make_multiple(scripts_to_generate) - generated.extend(generated_console_scripts) - - generated.extend( - maker.make_multiple(gui_scripts_to_generate, {'gui': True}) - ) - except MissingCallableSuffix as e: - entry = e.args[0] - raise InstallationError( - "Invalid script entry point: {} for req: {} - A callable " - "suffix is required. Cf https://packaging.python.org/en/" - "latest/distributing.html#console-scripts for more " - "information.".format(entry, req) - ) - - if warn_script_location: - msg = message_about_scripts_not_on_PATH(generated_console_scripts) - if msg is not None: - logger.warning(msg) - - # Record pip as the installer - installer = os.path.join(info_dir[0], 'INSTALLER') - temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') - with open(temp_installer, 'wb') as installer_file: - installer_file.write(b'pip\n') - shutil.move(temp_installer, installer) - generated.append(installer) - - # Record details of all files installed - record = os.path.join(info_dir[0], 'RECORD') - temp_record = os.path.join(info_dir[0], 'RECORD.pip') - with open_for_csv(record, 'r') as record_in: - with open_for_csv(temp_record, 'w+') as record_out: - reader = csv.reader(record_in) - outrows = get_csv_rows_for_installed( - reader, installed=installed, changed=changed, - generated=generated, lib_dir=lib_dir, - ) - writer = csv.writer(record_out) - # Sort to simplify testing. - for row in sorted_outrows(outrows): - writer.writerow(row) - shutil.move(temp_record, record) - - -def wheel_version(source_dir): - # type: (Optional[str]) -> Optional[Tuple[int, ...]] - """ - Return the Wheel-Version of an extracted wheel, if possible. - - Otherwise, return None if we couldn't parse / extract it. 
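# The INSTALLER and RECORD writes above share one replace-by-rename
# pattern -- write a sibling temp file, then shutil.move() it over the
# final name -- so readers never observe a half-written file:
import os
import shutil

def write_installer(info_dir):
    installer = os.path.join(info_dir, 'INSTALLER')
    temp_installer = os.path.join(info_dir, 'INSTALLER.pip')
    with open(temp_installer, 'wb') as f:
        f.write(b'pip\n')
    shutil.move(temp_installer, installer)
    return installer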
- """ - try: - dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] - - wheel_data = dist.get_metadata('WHEEL') - wheel_data = Parser().parsestr(wheel_data) - - version = wheel_data['Wheel-Version'].strip() - version = tuple(map(int, version.split('.'))) - return version - except Exception: - return None - - -def check_compatibility(version, name): - # type: (Optional[Tuple[int, ...]], str) -> None - """ - Raises errors or warns if called with an incompatible Wheel-Version. - - Pip should refuse to install a Wheel-Version that's a major series - ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when - installing a version only minor version ahead (e.g 1.2 > 1.1). - - version: a 2-tuple representing a Wheel-Version (Major, Minor) - name: name of wheel or package to raise exception about - - :raises UnsupportedWheel: when an incompatible Wheel-Version is given - """ - if not version: - raise UnsupportedWheel( - "%s is in an unsupported or invalid wheel" % name - ) - if version[0] > VERSION_COMPATIBLE[0]: - raise UnsupportedWheel( - "%s's Wheel-Version (%s) is not compatible with this version " - "of pip" % (name, '.'.join(map(str, version))) - ) - elif version > VERSION_COMPATIBLE: - logger.warning( - 'Installing from a newer Wheel-Version (%s)', - '.'.join(map(str, version)), - ) - - -def format_tag(file_tag): - # type: (Tuple[str, ...]) -> str - """ - Format three tags in the form "--". - - :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag). - """ - return '-'.join(file_tag) - - -class Wheel(object): - """A wheel file""" - - # TODO: Maybe move the class into the models sub-package - # TODO: Maybe move the install code into this class - - wheel_file_re = re.compile( - r"""^(?P(?P.+?)-(?P.*?)) - ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?) - \.whl|\.dist-info)$""", - re.VERBOSE - ) - - def __init__(self, filename): - # type: (str) -> None - """ - :raises InvalidWheelFilename: when the filename is invalid for a wheel - """ - wheel_info = self.wheel_file_re.match(filename) - if not wheel_info: - raise InvalidWheelFilename( - "%s is not a valid wheel filename." % filename - ) - self.filename = filename - self.name = wheel_info.group('name').replace('_', '-') - # we'll assume "_" means "-" due to wheel naming scheme - # (https://github.com/pypa/pip/issues/1150) - self.version = wheel_info.group('ver').replace('_', '-') - self.build_tag = wheel_info.group('build') - self.pyversions = wheel_info.group('pyver').split('.') - self.abis = wheel_info.group('abi').split('.') - self.plats = wheel_info.group('plat').split('.') - - # All the tag combinations from this file - self.file_tags = { - (x, y, z) for x in self.pyversions - for y in self.abis for z in self.plats - } - - def get_formatted_file_tags(self): - # type: () -> List[str] - """ - Return the wheel's tags as a sorted list of strings. - """ - return sorted(format_tag(tag) for tag in self.file_tags) - - def support_index_min(self, tags): - # type: (List[Pep425Tag]) -> int - """ - Return the lowest index that one of the wheel's file_tag combinations - achieves in the given list of supported tags. - - For example, if there are 8 supported tags and one of the file tags - is first in the list, then return 0. - - :param tags: the PEP 425 tags to check the wheel against, in order - with most preferred first. - - :raises ValueError: If none of the wheel's file tags match one of - the supported tags. 
- """ - return min(tags.index(tag) for tag in self.file_tags if tag in tags) - - def supported(self, tags): - # type: (List[Pep425Tag]) -> bool - """ - Return whether the wheel is compatible with one of the given tags. - - :param tags: the PEP 425 tags to check the wheel against. - """ - return not self.file_tags.isdisjoint(tags) - - -def _contains_egg_info( - s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): - """Determine whether the string looks like an egg_info. - - :param s: The string to parse. E.g. foo-2.1 - """ - return bool(_egg_info_re.search(s)) - - -def should_use_ephemeral_cache( - req, # type: InstallRequirement - should_unpack, # type: bool - cache_available, # type: bool - check_binary_allowed, # type: BinaryAllowedPredicate -): - # type: (...) -> Optional[bool] - """ - Return whether to build an InstallRequirement object using the - ephemeral cache. - - :param cache_available: whether a cache directory is available for the - should_unpack=True case. - - :return: True or False to build the requirement with ephem_cache=True - or False, respectively; or None not to build the requirement. - """ - if req.constraint: - # never build requirements that are merely constraints - return None - if req.is_wheel: - if not should_unpack: - logger.info( - 'Skipping %s, due to already being wheel.', req.name, - ) - return None - if not should_unpack: - # i.e. pip wheel, not pip install; - # return False, knowing that the caller will never cache - # in this case anyway, so this return merely means "build it". - # TODO improve this behavior - return False - - if req.editable or not req.source_dir: - return None - - if not check_binary_allowed(req): - logger.info( - "Skipping wheel build for %s, due to binaries " - "being disabled for it.", req.name, - ) - return None - - if req.link and req.link.is_vcs: - # VCS checkout. Build wheel just for this run. - return True - - link = req.link - base, ext = link.splitext() - if cache_available and _contains_egg_info(base): - return False - - # Otherwise, build the wheel just for this run using the ephemeral - # cache since we are either in the case of e.g. a local directory, or - # no cache directory is available to use. - return True - - -def format_command_result( - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) -> str - """ - Format command information for logging. - """ - command_desc = format_command_args(command_args) - text = 'Command arguments: {}\n'.format(command_desc) - - if not command_output: - text += 'Command output: None' - elif logger.getEffectiveLevel() > logging.DEBUG: - text += 'Command output: [use --verbose to show]' - else: - if not command_output.endswith('\n'): - command_output += '\n' - text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) - - return text - - -def get_legacy_build_wheel_path( - names, # type: List[str] - temp_dir, # type: str - req, # type: InstallRequirement - command_args, # type: List[str] - command_output, # type: str -): - # type: (...) -> Optional[str] - """ - Return the path to the wheel in the temporary build directory. - """ - # Sort for determinism. 
- names = sorted(names) - if not names: - msg = ( - 'Legacy build of wheel for {!r} created no files.\n' - ).format(req.name) - msg += format_command_result(command_args, command_output) - logger.warning(msg) - return None - - if len(names) > 1: - msg = ( - 'Legacy build of wheel for {!r} created more than one file.\n' - 'Filenames (choosing first): {}\n' - ).format(req.name, names) - msg += format_command_result(command_args, command_output) - logger.warning(msg) - - return os.path.join(temp_dir, names[0]) - - -def _always_true(_): - return True - - -class WheelBuilder(object): - """Build wheels from a RequirementSet.""" - - def __init__( - self, - preparer, # type: RequirementPreparer - wheel_cache, # type: WheelCache - build_options=None, # type: Optional[List[str]] - global_options=None, # type: Optional[List[str]] - check_binary_allowed=None, # type: Optional[BinaryAllowedPredicate] - no_clean=False # type: bool - ): - # type: (...) -> None - if check_binary_allowed is None: - # Binaries allowed by default. - check_binary_allowed = _always_true - - self.preparer = preparer - self.wheel_cache = wheel_cache - - self._wheel_dir = preparer.wheel_download_dir - - self.build_options = build_options or [] - self.global_options = global_options or [] - self.check_binary_allowed = check_binary_allowed - self.no_clean = no_clean - - def _build_one(self, req, output_dir, python_tag=None): - """Build one wheel. - - :return: The filename of the built wheel, or None if the build failed. - """ - # Install build deps into temporary directory (PEP 518) - with req.build_env: - return self._build_one_inside_env(req, output_dir, - python_tag=python_tag) - - def _build_one_inside_env(self, req, output_dir, python_tag=None): - with TempDirectory(kind="wheel") as temp_dir: - if req.use_pep517: - builder = self._build_one_pep517 - else: - builder = self._build_one_legacy - wheel_path = builder(req, temp_dir.path, python_tag=python_tag) - if wheel_path is not None: - wheel_name = os.path.basename(wheel_path) - dest_path = os.path.join(output_dir, wheel_name) - try: - wheel_hash, length = hash_file(wheel_path) - shutil.move(wheel_path, dest_path) - logger.info('Created wheel for %s: ' - 'filename=%s size=%d sha256=%s', - req.name, wheel_name, length, - wheel_hash.hexdigest()) - logger.info('Stored in directory: %s', output_dir) - return dest_path - except Exception: - pass - # Ignore return, we can't do anything else useful. - self._clean_one(req) - return None - - def _base_setup_args(self, req): - # NOTE: Eventually, we'd want to also -S to the flags here, when we're - # isolating. Currently, it breaks Python in virtualenvs, because it - # relies on site.py to find parts of the standard library outside the - # virtualenv. - return make_setuptools_shim_args( - req.setup_py_path, - global_options=self.global_options, - unbuffered_output=True - ) - - def _build_one_pep517(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the PEP 517 build process. - - Returns path to wheel if successfully built. Otherwise, returns None. 
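# get_legacy_build_wheel_path() above reduces to: sort the names for
# determinism, fail soft when the build produced nothing, and take the
# first name (after warning) when it produced several. As a pure sketch:
import os

def pick_built_wheel(names, temp_dir):
    names = sorted(names)
    if not names:
        return None  # caller logs a warning and treats the build as failed
    # with len(names) > 1 the caller warns, then still takes names[0]
    return os.path.join(temp_dir, names[0])

print(pick_built_wheel(['b.whl', 'a.whl'], '/tmp/w'))  # /tmp/w/a.whl on POSIX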
- """ - assert req.metadata_directory is not None - if self.build_options: - # PEP 517 does not support --build-options - logger.error('Cannot build wheel for %s using PEP 517 when ' - '--build-options is present' % (req.name,)) - return None - try: - logger.debug('Destination directory: %s', tempd) - - runner = runner_with_spinner_message( - 'Building wheel for {} (PEP 517)'.format(req.name) - ) - backend = req.pep517_backend - with backend.subprocess_runner(runner): - wheel_name = backend.build_wheel( - tempd, - metadata_directory=req.metadata_directory, - ) - if python_tag: - # General PEP 517 backends don't necessarily support - # a "--python-tag" option, so we rename the wheel - # file directly. - new_name = replace_python_tag(wheel_name, python_tag) - os.rename( - os.path.join(tempd, wheel_name), - os.path.join(tempd, new_name) - ) - # Reassign to simplify the return at the end of function - wheel_name = new_name - except Exception: - logger.error('Failed building wheel for %s', req.name) - return None - return os.path.join(tempd, wheel_name) - - def _build_one_legacy(self, req, tempd, python_tag=None): - """Build one InstallRequirement using the "legacy" build process. - - Returns path to wheel if successfully built. Otherwise, returns None. - """ - base_args = self._base_setup_args(req) - - spin_message = 'Building wheel for %s (setup.py)' % (req.name,) - with open_spinner(spin_message) as spinner: - logger.debug('Destination directory: %s', tempd) - wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ - + self.build_options - - if python_tag is not None: - wheel_args += ["--python-tag", python_tag] - - try: - output = call_subprocess( - wheel_args, - cwd=req.unpacked_source_directory, - spinner=spinner, - ) - except Exception: - spinner.finish("error") - logger.error('Failed building wheel for %s', req.name) - return None - - names = os.listdir(tempd) - wheel_path = get_legacy_build_wheel_path( - names=names, - temp_dir=tempd, - req=req, - command_args=wheel_args, - command_output=output, - ) - return wheel_path - - def _clean_one(self, req): - base_args = self._base_setup_args(req) - - logger.info('Running setup.py clean for %s', req.name) - clean_args = base_args + ['clean', '--all'] - try: - call_subprocess(clean_args, cwd=req.source_dir) - return True - except Exception: - logger.error('Failed cleaning build dir for %s', req.name) - return False - - def build( - self, - requirements, # type: Iterable[InstallRequirement] - should_unpack=False # type: bool - ): - # type: (...) -> List[InstallRequirement] - """Build wheels. - - :param should_unpack: If True, after building the wheel, unpack it - and replace the sdist with the unpacked version in preparation - for installation. - :return: True if all the wheels built correctly. - """ - # pip install uses should_unpack=True. - # pip install never provides a _wheel_dir. - # pip wheel uses should_unpack=False. - # pip wheel always provides a _wheel_dir (via the preparer). - assert ( - (should_unpack and not self._wheel_dir) or - (not should_unpack and self._wheel_dir) - ) - - buildset = [] - cache_available = bool(self.wheel_cache.cache_dir) - - for req in requirements: - ephem_cache = should_use_ephemeral_cache( - req, - should_unpack=should_unpack, - cache_available=cache_available, - check_binary_allowed=self.check_binary_allowed, - ) - if ephem_cache is None: - continue - - # Determine where the wheel should go. 
- if should_unpack: - if ephem_cache: - output_dir = self.wheel_cache.get_ephem_path_for_link( - req.link - ) - else: - output_dir = self.wheel_cache.get_path_for_link(req.link) - else: - output_dir = self._wheel_dir - - buildset.append((req, output_dir)) - - if not buildset: - return [] - - # TODO by @pradyunsg - # Should break up this method into 2 separate methods. - - # Build the wheels. - logger.info( - 'Building wheels for collected packages: %s', - ', '.join([req.name for (req, _) in buildset]), - ) - - python_tag = None - if should_unpack: - python_tag = pep425tags.implementation_tag - - with indent_log(): - build_success, build_failure = [], [] - for req, output_dir in buildset: - try: - ensure_dir(output_dir) - except OSError as e: - logger.warning( - "Building wheel for %s failed: %s", - req.name, e, - ) - build_failure.append(req) - continue - - wheel_file = self._build_one( - req, output_dir, - python_tag=python_tag, - ) - if wheel_file: - build_success.append(req) - if should_unpack: - # XXX: This is mildly duplicative with prepare_files, - # but not close enough to pull out to a single common - # method. - # The code below assumes temporary source dirs - - # prevent it doing bad things. - if ( - req.source_dir and - not has_delete_marker_file(req.source_dir) - ): - raise AssertionError( - "bad source dir - missing marker") - # Delete the source we built the wheel from - req.remove_temporary_source() - # set the build directory again - name is known from - # the work prepare_files did. - req.source_dir = req.ensure_build_location( - self.preparer.build_dir - ) - # Update the link for this. - req.link = Link(path_to_url(wheel_file)) - assert req.link.is_wheel - # extract the wheel into the dir - unpack_file(req.link.file_path, req.source_dir) - else: - build_failure.append(req) - - # notify success/failure - if build_success: - logger.info( - 'Successfully built %s', - ' '.join([req.name for req in build_success]), - ) - if build_failure: - logger.info( - 'Failed to build %s', - ' '.join([req.name for req in build_failure]), - ) - # Return a list of requirements that failed to build - return build_failure diff --git a/pipenv/patched/notpip/_vendor/README.rst b/pipenv/patched/notpip/_vendor/README.rst deleted file mode 100644 index 38c306aab8..0000000000 --- a/pipenv/patched/notpip/_vendor/README.rst +++ /dev/null @@ -1,151 +0,0 @@ -Vendoring Policy -================ - -* Vendored libraries **MUST** not be modified except as required to - successfully vendor them. - -* Vendored libraries **MUST** be released copies of libraries available on - PyPI. - -* Vendored libraries **MUST** be accompanied with LICENSE files. - -* The versions of libraries vendored in pip **MUST** be reflected in - ``pip/_vendor/vendor.txt``. - -* Vendored libraries **MUST** function without any build steps such as ``2to3`` or - compilation of C code, practically this limits to single source 2.x/3.x and - pure Python. - -* Any modifications made to libraries **MUST** be noted in - ``pip/_vendor/README.rst`` and their corresponding patches **MUST** be - included ``tools/automation/vendoring/patches``. - -* Vendored libraries should have corresponding ``vendored()`` entries in - ``pip/_vendor/__init__.py``. - -Rationale ---------- - -Historically pip has not had any dependencies except for ``setuptools`` itself, -choosing instead to implement any functionality it needed to prevent needing -a dependency. 
However, starting with pip 1.5, we began to replace code that was -implemented inside of pip with reusable libraries from PyPI. This brought the -typical benefits of reusing libraries instead of reinventing the wheel like -higher quality and more battle tested code, centralization of bug fixes -(particularly security sensitive ones), and better/more features for less work. - -However, there are several issues with having dependencies in the traditional -way (via ``install_requires``) for pip. These issues are: - -* **Fragility.** When pip depends on another library to function then if for - whatever reason that library either isn't installed or an incompatible - version is installed then pip ceases to function. This is of course true for - all Python applications, however for every application *except* for pip the - way you fix it is by re-running pip. Obviously, when pip can't run, you can't - use pip to fix pip, so you're left having to manually resolve dependencies and - installing them by hand. - -* **Making other libraries uninstallable.** One of pip's current dependencies is - the ``requests`` library, for which pip requires a fairly recent version to run. - If pip depended on ``requests`` in the traditional manner, then we'd either - have to maintain compatibility with every ``requests`` version that has ever - existed (and ever will), OR allow pip to render certain versions of ``requests`` - uninstallable. (The second issue, although technically true for any Python - application, is magnified by pip's ubiquity; pip is installed by default in - Python, in ``pyvenv``, and in ``virtualenv``.) - -* **Security.** This might seem puzzling at first glance, since vendoring - has a tendency to complicate updating dependencies for security updates, - and that holds true for pip. However, given the *other* reasons for avoiding - dependencies, the alternative is for pip to reinvent the wheel itself. - This is what pip did historically. It forced pip to re-implement its own - HTTPS verification routines as a workaround for the Python standard library's - lack of SSL validation, which resulted in similar bugs in the validation routine - in ``requests`` and ``urllib3``, except that they had to be discovered and - fixed independently. Even though we're vendoring, reusing libraries keeps pip - more secure by relying on the great work of our dependencies, *and* allowing for - faster, easier security fixes by simply pulling in newer versions of dependencies. - -* **Bootstrapping.** Currently most popular methods of installing pip rely - on pip's self-contained nature to install pip itself. These tools work by bundling - a copy of pip, adding it to ``sys.path``, and then executing that copy of pip. - This is done instead of implementing a "mini installer" (to reduce duplication); - pip already knows how to install a Python package, and is far more battle-tested - than any "mini installer" could ever possibly be. - -Many downstream redistributors have policies against this kind of bundling, and -instead opt to patch the software they distribute to debundle it and make it -rely on the global versions of the software that they already have packaged -(which may have its own patches applied to it). We (the pip team) would prefer -it if pip was *not* debundled in this manner due to the above reasons and -instead we would prefer it if pip would be left intact as it is now. 
The one -exception to this, is it is acceptable to remove the -``pip/_vendor/requests/cacert.pem`` file provided you ensure that the -``ssl.get_default_verify_paths().cafile`` API returns the correct CA bundle for -your system. This will ensure that pip will use your system provided CA bundle -instead of the copy bundled with pip. - -In the longer term, if someone has a *portable* solution to the above problems, -other than the bundling method we currently use, that doesn't add additional -problems that are unreasonable then we would be happy to consider, and possibly -switch to said method. This solution must function correctly across all of the -situation that we expect pip to be used and not mandate some external mechanism -such as OS packages. - - -Modifications -------------- - -* ``setuptools`` is completely stripped to only keep ``pkg_resources`` -* ``pkg_resources`` has been modified to import its dependencies from ``pip._vendor`` -* ``packaging`` has been modified to import its dependencies from ``pip._vendor`` -* ``html5lib`` has been modified to import six from ``pip._vendor`` and - to prefer importing from ``collections.abc`` instead of ``collections``. -* ``CacheControl`` has been modified to import its dependencies from ``pip._vendor`` -* ``requests`` has been modified to import its other dependencies from ``pip._vendor`` - and to *not* load ``simplejson`` (all platforms) and ``pyopenssl`` (Windows). - - -Automatic Vendoring -------------------- - -Vendoring is automated via the ``vendoring`` tool from the content of -``pip/_vendor/vendor.txt`` and the different patches in -``tools/automation/vendoring/patches``. -Launch it via ``vendoring sync . -v`` (requires ``vendoring>=0.2.2``). - - -Debundling ----------- - -As mentioned in the rationale, we, the pip team, would prefer it if pip was not -debundled (other than optionally ``pip/_vendor/requests/cacert.pem``) and that -pip was left intact. However, if you insist on doing so, we have a -semi-supported method (that we don't test in our CI) and requires a bit of -extra work on your end in order to solve the problems described above. - -1. Delete everything in ``pip/_vendor/`` **except** for - ``pip/_vendor/__init__.py``. - -2. Generate wheels for each of pip's dependencies (and any of their - dependencies) using your patched copies of these libraries. These must be - placed somewhere on the filesystem that pip can access (``pip/_vendor`` is - the default assumption). - -3. Modify ``pip/_vendor/__init__.py`` so that the ``DEBUNDLED`` variable is - ``True``. - -4. Upon installation, the ``INSTALLER`` file in pip's own ``dist-info`` - directory should be set to something other than ``pip``, so that pip - can detect that it wasn't installed using itself. - -5. *(optional)* If you've placed the wheels in a location other than - ``pip/_vendor/``, then modify ``pip/_vendor/__init__.py`` so that the - ``WHEEL_DIR`` variable points to the location you've placed them. - -6. *(optional)* Update the ``pip_self_version_check`` logic to use the - appropriate logic for determining the latest available version of pip and - prompt the user with the correct upgrade message. - -Note that partial debundling is **NOT** supported. You need to prepare wheels -for all dependencies for successful debundling. 
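For reference, the switches described in steps 3 and 5 live at the top of
``pip/_vendor/__init__.py``; a minimal sketch of that toggle (default
values assumed from pip's layout) looks like::

    import os.path

    # Step 3: redistributors flip this to True after deleting the
    # vendored tree.
    DEBUNDLED = False

    # Step 5: where debundled wheels are expected to live; pip adds them
    # to sys.path before importing each dependency.
    WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))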
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol.pyi b/pipenv/patched/notpip/_vendor/cachecontrol.pyi deleted file mode 100644 index 636a66baca..0000000000 --- a/pipenv/patched/notpip/_vendor/cachecontrol.pyi +++ /dev/null @@ -1 +0,0 @@ -from cachecontrol import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/certifi.pyi b/pipenv/patched/notpip/_vendor/certifi.pyi deleted file mode 100644 index e5c4d3d2af..0000000000 --- a/pipenv/patched/notpip/_vendor/certifi.pyi +++ /dev/null @@ -1 +0,0 @@ -from certifi import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/chardet.pyi b/pipenv/patched/notpip/_vendor/chardet.pyi deleted file mode 100644 index 29e87e3315..0000000000 --- a/pipenv/patched/notpip/_vendor/chardet.pyi +++ /dev/null @@ -1 +0,0 @@ -from chardet import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/colorama.pyi b/pipenv/patched/notpip/_vendor/colorama.pyi deleted file mode 100644 index 60a6c2541f..0000000000 --- a/pipenv/patched/notpip/_vendor/colorama.pyi +++ /dev/null @@ -1 +0,0 @@ -from colorama import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/distlib.pyi b/pipenv/patched/notpip/_vendor/distlib.pyi deleted file mode 100644 index ea94b159a6..0000000000 --- a/pipenv/patched/notpip/_vendor/distlib.pyi +++ /dev/null @@ -1 +0,0 @@ -from distlib import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/distro.pyi b/pipenv/patched/notpip/_vendor/distro.pyi deleted file mode 100644 index c7ea94b37b..0000000000 --- a/pipenv/patched/notpip/_vendor/distro.pyi +++ /dev/null @@ -1 +0,0 @@ -from distro import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/html5lib.pyi b/pipenv/patched/notpip/_vendor/html5lib.pyi deleted file mode 100644 index 9bc9af95e3..0000000000 --- a/pipenv/patched/notpip/_vendor/html5lib.pyi +++ /dev/null @@ -1 +0,0 @@ -from html5lib import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/idna.pyi b/pipenv/patched/notpip/_vendor/idna.pyi deleted file mode 100644 index 7410d72fe7..0000000000 --- a/pipenv/patched/notpip/_vendor/idna.pyi +++ /dev/null @@ -1 +0,0 @@ -from idna import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/ipaddress.pyi b/pipenv/patched/notpip/_vendor/ipaddress.pyi deleted file mode 100644 index eef994d945..0000000000 --- a/pipenv/patched/notpip/_vendor/ipaddress.pyi +++ /dev/null @@ -1 +0,0 @@ -from ipaddress import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/msgpack.pyi b/pipenv/patched/notpip/_vendor/msgpack.pyi deleted file mode 100644 index 4e69b88679..0000000000 --- a/pipenv/patched/notpip/_vendor/msgpack.pyi +++ /dev/null @@ -1 +0,0 @@ -from msgpack import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/packaging.pyi b/pipenv/patched/notpip/_vendor/packaging.pyi deleted file mode 100644 index 3458a3d637..0000000000 --- a/pipenv/patched/notpip/_vendor/packaging.pyi +++ /dev/null @@ -1 +0,0 @@ -from packaging import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE b/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE index f433b1a53f..4947287f7b 100644 --- a/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE +++ b/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE @@ -174,4 +174,4 @@ incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
- END OF TERMS AND CONDITIONS + END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/packaging/py.typed b/pipenv/patched/notpip/_vendor/packaging/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pipenv/patched/notpip/_vendor/pep517.pyi b/pipenv/patched/notpip/_vendor/pep517.pyi deleted file mode 100644 index d1ce810290..0000000000 --- a/pipenv/patched/notpip/_vendor/pep517.pyi +++ /dev/null @@ -1 +0,0 @@ -from pep517 import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pkg_resources.pyi b/pipenv/patched/notpip/_vendor/pkg_resources.pyi deleted file mode 100644 index 477030314f..0000000000 --- a/pipenv/patched/notpip/_vendor/pkg_resources.pyi +++ /dev/null @@ -1 +0,0 @@ -from pkg_resources import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/progress.pyi b/pipenv/patched/notpip/_vendor/progress.pyi deleted file mode 100644 index c92de832bb..0000000000 --- a/pipenv/patched/notpip/_vendor/progress.pyi +++ /dev/null @@ -1 +0,0 @@ -from progress import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pyparsing.pyi b/pipenv/patched/notpip/_vendor/pyparsing.pyi deleted file mode 100644 index 8e9de6b021..0000000000 --- a/pipenv/patched/notpip/_vendor/pyparsing.pyi +++ /dev/null @@ -1 +0,0 @@ -from pyparsing import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/pytoml.pyi b/pipenv/patched/notpip/_vendor/pytoml.pyi deleted file mode 100644 index 5566ee8972..0000000000 --- a/pipenv/patched/notpip/_vendor/pytoml.pyi +++ /dev/null @@ -1 +0,0 @@ -from pytoml import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/requests.pyi b/pipenv/patched/notpip/_vendor/requests.pyi deleted file mode 100644 index 6d69cd6fac..0000000000 --- a/pipenv/patched/notpip/_vendor/requests.pyi +++ /dev/null @@ -1 +0,0 @@ -from requests import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/retrying.pyi b/pipenv/patched/notpip/_vendor/retrying.pyi deleted file mode 100644 index 90f20c6dbc..0000000000 --- a/pipenv/patched/notpip/_vendor/retrying.pyi +++ /dev/null @@ -1 +0,0 @@ -from retrying import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/six.LICENSE b/pipenv/patched/notpip/_vendor/six.LICENSE index de6633112c..365d10741b 100644 --- a/pipenv/patched/notpip/_vendor/six.LICENSE +++ b/pipenv/patched/notpip/_vendor/six.LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2010-2020 Benjamin Peterson +Copyright (c) 2010-2018 Benjamin Peterson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/pipenv/patched/notpip/_vendor/six/__init__.pyi b/pipenv/patched/notpip/_vendor/six/__init__.pyi deleted file mode 100644 index e5c0e24227..0000000000 --- a/pipenv/patched/notpip/_vendor/six/__init__.pyi +++ /dev/null @@ -1 +0,0 @@ -from six import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/six/moves/__init__.pyi b/pipenv/patched/notpip/_vendor/six/moves/__init__.pyi deleted file mode 100644 index 7a82f79db6..0000000000 --- a/pipenv/patched/notpip/_vendor/six/moves/__init__.pyi +++ /dev/null @@ -1 +0,0 @@ -from six.moves import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/six/moves/configparser.pyi b/pipenv/patched/notpip/_vendor/six/moves/configparser.pyi deleted file mode 100644 index f77b3f4105..0000000000 --- 
a/pipenv/patched/notpip/_vendor/six/moves/configparser.pyi +++ /dev/null @@ -1 +0,0 @@ -from six.moves.configparser import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/urllib3.pyi b/pipenv/patched/notpip/_vendor/urllib3.pyi deleted file mode 100644 index 7e8a2a7085..0000000000 --- a/pipenv/patched/notpip/_vendor/urllib3.pyi +++ /dev/null @@ -1 +0,0 @@ -from urllib3 import * \ No newline at end of file diff --git a/pipenv/patched/notpip/_vendor/vendor.txt b/pipenv/patched/notpip/_vendor/vendor.txt index cbc2830ac0..aadd35261a 100644 --- a/pipenv/patched/notpip/_vendor/vendor.txt +++ b/pipenv/patched/notpip/_vendor/vendor.txt @@ -1,23 +1,23 @@ appdirs==1.4.3 -CacheControl==0.12.6 -colorama==0.4.3 +CacheControl==0.12.5 +colorama==0.4.1 contextlib2==0.6.0 -distlib==0.3.0 +distlib==0.2.9.post0 distro==1.4.0 html5lib==1.0.1 -ipaddress==1.0.23 # Only needed on 2.6 and 2.7 +ipaddress==1.0.22 # Only needed on 2.6 and 2.7 msgpack==0.6.2 -packaging==20.1 +packaging==19.2 pep517==0.7.0 progress==1.5 -pyparsing==2.4.6 +pyparsing==2.4.2 pytoml==0.1.21 requests==2.22.0 - certifi==2019.11.28 + certifi==2019.9.11 chardet==3.0.4 idna==2.8 - urllib3==1.25.7 + urllib3==1.25.6 retrying==1.3.3 -setuptools==44.0.0 -six==1.14.0 +setuptools==41.4.0 +six==1.12.0 webencodings==0.5.1 diff --git a/pipenv/patched/notpip/_vendor/webencodings.pyi b/pipenv/patched/notpip/_vendor/webencodings.pyi deleted file mode 100644 index a11db4d82c..0000000000 --- a/pipenv/patched/notpip/_vendor/webencodings.pyi +++ /dev/null @@ -1 +0,0 @@ -from webencodings import * \ No newline at end of file diff --git a/pipenv/patched/notpip/appdirs.LICENSE.txt b/pipenv/patched/notpip/appdirs.LICENSE.txt deleted file mode 100644 index 107c61405e..0000000000 --- a/pipenv/patched/notpip/appdirs.LICENSE.txt +++ /dev/null @@ -1,23 +0,0 @@ -# This is the MIT license - -Copyright (c) 2010 ActiveState Software Inc. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/pipenv/patched/notpip/contextlib2.LICENSE.txt b/pipenv/patched/notpip/contextlib2.LICENSE.txt deleted file mode 100644 index 5de20277df..0000000000 --- a/pipenv/patched/notpip/contextlib2.LICENSE.txt +++ /dev/null @@ -1,122 +0,0 @@ - - -A. HISTORY OF THE SOFTWARE -========================== - -contextlib2 is a derivative of the contextlib module distributed by the PSF -as part of the Python standard library. According, it is itself redistributed -under the PSF license (reproduced in full below). 
As the contextlib module -was added only in Python 2.5, the licenses for earlier Python versions are -not applicable and have not been included. - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases that included the contextlib module. - - Release Derived Year Owner GPL- - from compatible? (1) - - 2.5 2.4 2006 PSF yes - 2.5.1 2.5 2007 PSF yes - 2.5.2 2.5.1 2008 PSF yes - 2.5.3 2.5.2 2008 PSF yes - 2.6 2.5 2008 PSF yes - 2.6.1 2.6 2008 PSF yes - 2.6.2 2.6.1 2009 PSF yes - 2.6.3 2.6.2 2009 PSF yes - 2.6.4 2.6.3 2009 PSF yes - 2.6.5 2.6.4 2010 PSF yes - 3.0 2.6 2008 PSF yes - 3.0.1 3.0 2009 PSF yes - 3.1 3.0.1 2009 PSF yes - 3.1.1 3.1 2009 PSF yes - 3.1.2 3.1.1 2010 PSF yes - 3.1.3 3.1.2 2010 PSF yes - 3.1.4 3.1.3 2011 PSF yes - 3.2 3.1 2011 PSF yes - 3.2.1 3.2 2011 PSF yes - 3.2.2 3.2.1 2011 PSF yes - 3.3 3.2 2012 PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011 Python Software Foundation; All Rights Reserved" are retained in Python -alone or in any derivative version prepared by Licensee. - -3. 
In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/pipenv/patched/notpip/distro.LICENSE b/pipenv/patched/notpip/distro.LICENSE deleted file mode 100644 index e06d208186..0000000000 --- a/pipenv/patched/notpip/distro.LICENSE +++ /dev/null @@ -1,202 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - diff --git a/pipenv/patched/notpip/idna.LICENSE.rst b/pipenv/patched/notpip/idna.LICENSE.rst deleted file mode 100644 index 3ee64fba29..0000000000 --- a/pipenv/patched/notpip/idna.LICENSE.rst +++ /dev/null @@ -1,80 +0,0 @@ -License -------- - -Copyright (c) 2013-2018, Kim Davies. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -#. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -#. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -#. Neither the name of the copyright holder nor the names of the - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -#. 
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH - DAMAGE. - -Portions of the codec implementation and unit tests are derived from the -Python standard library, which carries the `Python Software Foundation -License `_: - - Copyright (c) 2001-2014 Python Software Foundation; All Rights Reserved - -Portions of the unit tests are derived from the Unicode standard, which -is subject to the Unicode, Inc. License Agreement: - - Copyright (c) 1991-2014 Unicode, Inc. All rights reserved. - Distributed under the Terms of Use in - . - - Permission is hereby granted, free of charge, to any person obtaining - a copy of the Unicode data files and any associated documentation - (the "Data Files") or Unicode software and any associated documentation - (the "Software") to deal in the Data Files or Software - without restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, and/or sell copies of - the Data Files or Software, and to permit persons to whom the Data Files - or Software are furnished to do so, provided that - - (a) this copyright and permission notice appear with all copies - of the Data Files or Software, - - (b) this copyright and permission notice appear in associated - documentation, and - - (c) there is clear notice in each modified Data File or in the Software - as well as in the documentation associated with the Data File(s) or - Software that the data or software has been modified. - - THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF - ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE - WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT OF THIRD PARTY RIGHTS. - IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS - NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL - DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, - DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - PERFORMANCE OF THE DATA FILES OR SOFTWARE. - - Except as contained in this notice, the name of a copyright holder - shall not be used in advertising or otherwise to promote the sale, - use or other dealings in these Data Files or Software without prior - written authorization of the copyright holder. diff --git a/pipenv/patched/notpip/ipaddress.LICENSE b/pipenv/patched/notpip/ipaddress.LICENSE deleted file mode 100644 index 41bd16ba6c..0000000000 --- a/pipenv/patched/notpip/ipaddress.LICENSE +++ /dev/null @@ -1,50 +0,0 @@ -This package is a modified version of cpython's ipaddress module. -It is therefore distributed under the PSF license, as follows: - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. 
This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are -retained in Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/pipenv/patched/notpip/msgpack.COPYING b/pipenv/patched/notpip/msgpack.COPYING deleted file mode 100644 index f067af3aae..0000000000 --- a/pipenv/patched/notpip/msgpack.COPYING +++ /dev/null @@ -1,14 +0,0 @@ -Copyright (C) 2008-2011 INADA Naoki - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/pipenv/patched/notpip/packaging.LICENSE.APACHE b/pipenv/patched/notpip/packaging.LICENSE.APACHE deleted file mode 100644 index 4947287f7b..0000000000 --- a/pipenv/patched/notpip/packaging.LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/pipenv/patched/notpip/packaging.LICENSE.BSD b/pipenv/patched/notpip/packaging.LICENSE.BSD deleted file mode 100644 index 42ce7b75c9..0000000000 --- a/pipenv/patched/notpip/packaging.LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. 
Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/patched/notpip/pyparsing.LICENSE b/pipenv/patched/notpip/pyparsing.LICENSE deleted file mode 100644 index 1bf98523e3..0000000000 --- a/pipenv/patched/notpip/pyparsing.LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/pipenv/patched/notpip/retrying.LICENSE b/pipenv/patched/notpip/retrying.LICENSE deleted file mode 100644 index 7a4a3ea242..0000000000 --- a/pipenv/patched/notpip/retrying.LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
\ No newline at end of file diff --git a/pipenv/patched/notpip/six.LICENSE b/pipenv/patched/notpip/six.LICENSE deleted file mode 100644 index 365d10741b..0000000000 --- a/pipenv/patched/notpip/six.LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright (c) 2010-2018 Benjamin Peterson - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/pipenv/patched/notpip/webencodings.LICENSE b/pipenv/patched/notpip/webencodings.LICENSE deleted file mode 100644 index 3d0d3e7059..0000000000 --- a/pipenv/patched/notpip/webencodings.LICENSE +++ /dev/null @@ -1,31 +0,0 @@ -Copyright (c) 2012 by Simon Sapin. - -Some rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * The names of the contributors may not be used to endorse or - promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/pipenv/patched/piptools/scripts/compile.py b/pipenv/patched/piptools/scripts/compile.py old mode 100755 new mode 100644 diff --git a/pipenv/patched/piptools/scripts/sync.py b/pipenv/patched/piptools/scripts/sync.py old mode 100755 new mode 100644 diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py index aa93ec88ac..28ece192c4 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py @@ -141,13 +141,13 @@ def _requirement_to_str_lowercase_name(requirement): Formats a packaging.requirements.Requirement with a lowercase name. This is simply a copy of - https://github.com/pypa/packaging/blob/16.8/packaging/requirements.py#L109-L124 + https://github.com/pypa/pipenv/patched/packaging/blob/pipenv/patched/16.8/packaging/requirements.py#L109-L124 modified to lowercase the dependency name. Previously, we were invoking the original Requirement.__str__ method and lowercasing the entire result, which would lowercase the name, *and* other, important stuff that should not be lowercased (such as the marker). See - this issue for more information: https://github.com/pypa/pipenv/issues/2113. + this issue for more information: https://github.com/pypa/pipenv/patched/pipenv/issues/2113. """ parts = [requirement.name.lower()] diff --git a/pipenv/patched/safety.LICENSE b/pipenv/patched/safety/LICENSE similarity index 99% rename from pipenv/patched/safety.LICENSE rename to pipenv/patched/safety/LICENSE index c5fda55836..55a1eb037e 100644 --- a/pipenv/patched/safety.LICENSE +++ b/pipenv/patched/safety/LICENSE @@ -1,3 +1,4 @@ + MIT License Copyright (c) 2016, pyup.io diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py index 3a0188c9f1..795c4872f9 100644 --- a/pipenv/vendor/pip_shims/__init__.py +++ b/pipenv/vendor/pip_shims/__init__.py @@ -1,4 +1,24 @@ # -*- coding=utf-8 -*- +""" +This library is a set of compatibility access shims to the ``pip`` internal API. It provides compatibility with pip versions 8.0 through the current release. The shims are provided using a lazy import strategy that hacks module lookup by overloading a class instance's ``getattr`` method. This library exists due to my constant writing of the same set of import shims. + +Submodules +========== + +..
autosummary:: + :toctree: _autosummary + + pip_shims.models + pip_shims.compat + pip_shims.utils + pip_shims.shims + pip_shims.environment + +""" from __future__ import absolute_import import sys diff --git a/pipenv/vendor/pip_shims/compat.py b/pipenv/vendor/pip_shims/compat.py index 0c125321a6..d8f409e19f 100644 --- a/pipenv/vendor/pip_shims/compat.py +++ b/pipenv/vendor/pip_shims/compat.py @@ -720,7 +720,7 @@ def shim_unpack( :type unpack_fn: Callable :param str download_dir: The directory to download the file to :param TShimmedFunc tempdir_manager_provider: A callable or shim referring to - `global_tempdir_manager` function from pip or a shimmed no-op context manager + `global_tempdir_manager` function from pipenv.patched.notpip or a shimmed no-op context manager :param Optional[:class:`~pip._internal.req.req_install.InstallRequirement`] ireq: an Install Requirement instance, defaults to None :param Optional[:class:`~pip._internal.models.link.Link`] link: A Link instance, From ca1da4b295c7c188760bc1f7e3deaba3d4e41329 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 16:59:53 -0400 Subject: [PATCH 06/12] Fix broken pip patch Signed-off-by: Dan Ryan --- tasks/vendoring/patches/patched/pip20.patch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasks/vendoring/patches/patched/pip20.patch b/tasks/vendoring/patches/patched/pip20.patch index 940647f0f3..9fa79b0fda 100644 --- a/tasks/vendoring/patches/patched/pip20.patch +++ b/tasks/vendoring/patches/patched/pip20.patch @@ -363,7 +363,7 @@ index 1dc1a576..4cc06026 100644 - def __init__(self, name, version, link): - # type: (str, str, Link) -> None -+ def __init__(self, name, version, link, requies_python=None): ++ def __init__(self, name, version, link, requires_python=None): + # type: (str, str, Link, Any) -> None self.name = name self.version = parse_version(version) # type: _BaseVersion From 580056f8ee8b88d054048b760f12df39a0d79aad Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 17:04:15 -0400 Subject: [PATCH 07/12] Update license download script to accurately name licenses Signed-off-by: Dan Ryan --- tasks/vendoring/__init__.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 3c0cb22b35..31f2203cf7 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -437,15 +437,20 @@ def install(ctx, vendor_dir, package=None): license_file.read_text() ) elif vendor_dir.joinpath("{0}.py".format(pkg)).exists(): - vendor_dir.joinpath("{0}.py.LICENSE".format(pkg)).write_text( + vendor_dir.joinpath("{0}.LICENSE".format(pkg)).write_text( license_file.read_text() ) else: + pkg = pkg.replace("-", "?").replace("_", "?") matched_path = next( iter(pth for pth in vendor_dir.glob("{0}*".format(pkg))), None ) if matched_path is not None: - vendor_dir.joinpath("{0}.LICENSE".format(matched_path)).write_text( + if matched_path.is_dir(): + target = vendor_dir.joinpath(matched_path).joinpath("LICENSE") + else: + target = vendor_dir.joinpath("{0}.LICENSE".format(matched_path)) + target.write_text( license_file.read_text() ) @@ -525,8 +530,11 @@ def vendor(ctx, vendor_dir, package=None, rewrite=True): @invoke.task -def redo_imports(ctx, library): - vendor_dir = _get_vendor_dir(ctx) +def redo_imports(ctx, library, vendor_dir=None): + if vendor_dir is None: + vendor_dir = _get_vendor_dir(ctx) + else: + vendor_dir = Path(vendor_dir).absolute() log("Using vendor dir: %s" % vendor_dir) vendored_libs = 
detect_vendored_libs(vendor_dir) item = vendor_dir / library @@ -538,7 +546,6 @@ def redo_imports(ctx, library): else: rewrite_file_imports(library_name, vendored_libs, vendor_dir) - @invoke.task def rewrite_all_imports(ctx): vendor_dir = _get_vendor_dir(ctx) From d0181e3ab91d1ab401d2d4ad32714dc94b45d03a Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 Apr 2020 17:48:57 -0400 Subject: [PATCH 08/12] Update patched test Signed-off-by: Dan Ryan --- tests/unit/test_patched.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_patched.py b/tests/unit/test_patched.py index f358191538..69461e246b 100644 --- a/tests/unit/test_patched.py +++ b/tests/unit/test_patched.py @@ -2,7 +2,7 @@ import pytest -from notpip._internal.index import PackageFinder +from notpip._internal.index.package_finder import PackageFinder get_extras_links_scenarios = { From 76576761c6ad0a22662d5921cf6b1a9d1563a42d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 28 Apr 2020 11:47:00 -0400 Subject: [PATCH 09/12] Allow incompatible links during hash lookup Signed-off-by: Dan Ryan --- pipenv/patched/piptools/repositories/pypi.py | 36 +++++++----- .../vendoring/patches/patched/piptools.patch | 56 ++++++++++++------- 2 files changed, 59 insertions(+), 33 deletions(-) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index fe6e7350b4..e81af1d501 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -52,7 +52,7 @@ ) from .base import BaseRepository -os.environ["PIP_SHIMS_BASE_MODULE"] = str("pip") +os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") FILE_CHUNK_SIZE = 4096 FileStream = collections.namedtuple("FileStream", "stream size") @@ -128,7 +128,7 @@ def __init__(self, pip_args, cache_dir=CACHE_DIR, session=None, build_isolation= session = self.command._build_session(self.options) self.session = session self.finder = self.command._build_package_finder( - options=self.options, session=self.session + options=self.options, session=self.session, ignore_requires_python=True ) # Caches @@ -296,7 +296,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache): wheel_cache=wheel_cache, use_user_site=False, ignore_installed=True, - ignore_requires_python=False, + ignore_requires_python=True, force_reinstall=False, upgrade_strategy="to-satisfy-only", ) @@ -393,18 +393,28 @@ def get_hashes(self, ireq): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. 
- matching_candidates = ( - c for c in clean_requires_python(self.find_all_candidates(ireq.name)) - if c.version in ireq.specifier - ) - log.debug(" {}".format(ireq.name)) + result = {} + with self.allow_all_links(): + matching_candidates = ( + c for c in clean_requires_python(self.find_all_candidates(ireq.name)) + if c.version in ireq.specifier + ) + log.debug(" {}".format(ireq.name)) + result = { + h for h in + map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) + if h is not None + } + return result - return { - h for h in - map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) - if h is not None - } + @contextmanager + def allow_all_links(self): + try: + self.finder._ignore_compatibility = True + yield + finally: + self.finder._ignore_compatibility = False @contextmanager def allow_all_wheels(self): diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 65378168cc..ebd94723a5 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -185,7 +185,7 @@ index ec3a796..1aa29f0 100644 else: return self.repository.find_best_match(ireq, prereleases) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py -index ef5ba4e..fe6e735 100644 +index ef5ba4e..b96acf6 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -2,28 +2,48 @@ @@ -254,7 +254,7 @@ index ef5ba4e..fe6e735 100644 ) from .base import BaseRepository -+os.environ["PIP_SHIMS_BASE_MODULE"] = str("pip") ++os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") FILE_CHUNK_SIZE = 4096 FileStream = collections.namedtuple("FileStream", "stream size") @@ -301,7 +301,7 @@ index ef5ba4e..fe6e735 100644 class PyPIRepository(BaseRepository): DEFAULT_INDEX_URL = PyPI.simple_url -@@ -46,19 +106,27 @@ class PyPIRepository(BaseRepository): +@@ -46,21 +106,29 @@ class PyPIRepository(BaseRepository): changed/configured on the Finder. """ @@ -330,8 +330,11 @@ index ef5ba4e..fe6e735 100644 + session = self.command._build_session(self.options) + self.session = session self.finder = self.command._build_package_finder( - options=self.options, session=self.session +- options=self.options, session=self.session ++ options=self.options, session=self.session, ignore_requires_python=True ) + + # Caches @@ -73,6 +141,10 @@ class PyPIRepository(BaseRepository): # of all secondary dependencies for the given requirement, so we # only have to go to disk once for each requirement @@ -465,7 +468,7 @@ index ef5ba4e..fe6e735 100644 if not is_pinned_requirement(ireq): raise TypeError("Expected pinned requirement, got {}".format(ireq)) -@@ -260,39 +393,19 @@ class PyPIRepository(BaseRepository): +@@ -260,38 +393,28 @@ class PyPIRepository(BaseRepository): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. 
@@ -473,21 +476,15 @@ index ef5ba4e..fe6e735 100644 - candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version) - matching_versions = list( - ireq.specifier.filter((candidate.version for candidate in all_candidates)) -+ matching_candidates = ( -+ c for c in clean_requires_python(self.find_all_candidates(ireq.name)) -+ if c.version in ireq.specifier - ) +- ) - matching_candidates = candidates_by_version[matching_versions[0]] +- +- log.debug(" {}".format(ireq.name)) - log.debug(" {}".format(ireq.name)) - - return { +- return { - self._get_file_hash(candidate.link) for candidate in matching_candidates -+ h for h in -+ map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) -+ if h is not None - } - +- } +- - def _get_file_hash(self, link): - log.debug(" Hashing {}".format(link.url_without_fragment)) - h = hashlib.new(FAVORITE_HASH) @@ -501,16 +498,35 @@ index ef5ba4e..fe6e735 100644 - context_manager = progressbar(chunks, length=iter_length, label=" ") - else: - context_manager = contextlib.nullcontext(chunks) -- ++ result = {} ++ with self.allow_all_links(): ++ matching_candidates = ( ++ c for c in clean_requires_python(self.find_all_candidates(ireq.name)) ++ if c.version in ireq.specifier ++ ) ++ log.debug(" {}".format(ireq.name)) ++ result = { ++ h for h in ++ map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates) ++ if h is not None ++ } ++ return result + - # Iterate over the chosen context manager - with context_manager as bar: - for chunk in bar: - h.update(chunk) - return ":".join([FAVORITE_HASH, h.hexdigest()]) -- ++ @contextmanager ++ def allow_all_links(self): ++ try: ++ self.finder._ignore_compatibility = True ++ yield ++ finally: ++ self.finder._ignore_compatibility = False + @contextmanager def allow_all_wheels(self): - """ diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py index 0116992..550069d 100644 --- a/pipenv/patched/piptools/resolver.py From c19435e6daaac769fbd8d6e2ac9adb8cb8d795cd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 28 Apr 2020 11:47:16 -0400 Subject: [PATCH 10/12] Fix relative path issue during tests Signed-off-by: Dan Ryan --- tests/integration/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 18e38878cb..ef91f5031f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -210,7 +210,7 @@ def isolate(create_tmpdir): os.environ["GIT_ASK_YESNO"] = fs_str("false") workon_home = create_tmpdir() os.environ["WORKON_HOME"] = fs_str(str(workon_home)) - os.environ["HOME"] = home_dir + os.environ["HOME"] = os.path.abspath(home_dir) mkdir_p(os.path.join(home_dir, "projects")) # Ignore PIPENV_ACTIVE so that it works as under a bare environment. os.environ.pop("PIPENV_ACTIVE", None) From d4a86d0ca9a7ed4003d427be1d5719514b144cdd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 28 Apr 2020 12:09:44 -0400 Subject: [PATCH 11/12] Add news entry. Signed-off-by: Dan Ryan --- news/4215.vendor.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/4215.vendor.rst diff --git a/news/4215.vendor.rst b/news/4215.vendor.rst new file mode 100644 index 0000000000..20a34b0244 --- /dev/null +++ b/news/4215.vendor.rst @@ -0,0 +1 @@ +Updated vendored ``pip`` => ``20.0.2`` and ``pip-tools`` => ``5.0.0``. 
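A note on the `allow_all_links` change above (PATCH 09): the patch wraps the hash lookup in a context manager that temporarily flips the finder's `_ignore_compatibility` flag, so candidates whose links would normally be filtered out (for example, wheels built for other platforms or Pythons) still contribute hashes, and the `finally` clause guarantees the flag is restored even if hashing raises. Below is a minimal, self-contained sketch of that flag-toggling pattern; `FakeFinder` and the free-standing `allow_all_links` function are illustrative stand-ins for this example, not pip or piptools API.

from contextlib import contextmanager


class FakeFinder(object):
    """Illustrative stand-in for the patched PackageFinder."""
    _ignore_compatibility = False


@contextmanager
def allow_all_links(finder):
    # Same shape as the patched method: set the flag, yield, always restore.
    try:
        finder._ignore_compatibility = True
        yield
    finally:
        finder._ignore_compatibility = False


finder = FakeFinder()
with allow_all_links(finder):
    assert finder._ignore_compatibility   # incompatible links are visible here
assert not finder._ignore_compatibility  # flag is restored after the block

The `try`/`finally` inside the generator is what makes the pattern safe: the `finally` branch runs whether the `with` body completes or raises, so a failed hash download cannot leave the finder permanently ignoring compatibility checks.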
From 5b2eddcc6534ec77c4d3228d555f9b707ab1b50d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 28 Apr 2020 14:28:21 -0400 Subject: [PATCH 12/12] Add build dir even though it's in gitignore Signed-off-by: Dan Ryan --- .../_internal/operations/build/__init__.py | 0 .../_internal/operations/build/metadata.py | 40 ++++++ .../operations/build/metadata_legacy.py | 122 ++++++++++++++++++ .../_internal/operations/build/wheel.py | 46 +++++++ .../operations/build/wheel_legacy.py | 115 +++++++++++++++++ 5 files changed, 323 insertions(+) create mode 100644 pipenv/patched/notpip/_internal/operations/build/__init__.py create mode 100644 pipenv/patched/notpip/_internal/operations/build/metadata.py create mode 100644 pipenv/patched/notpip/_internal/operations/build/metadata_legacy.py create mode 100644 pipenv/patched/notpip/_internal/operations/build/wheel.py create mode 100644 pipenv/patched/notpip/_internal/operations/build/wheel_legacy.py diff --git a/pipenv/patched/notpip/_internal/operations/build/__init__.py b/pipenv/patched/notpip/_internal/operations/build/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/patched/notpip/_internal/operations/build/metadata.py b/pipenv/patched/notpip/_internal/operations/build/metadata.py new file mode 100644 index 0000000000..b3ad90c6cf --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/build/metadata.py @@ -0,0 +1,40 @@ +"""Metadata generation logic for source distributions. +""" + +import logging +import os + +from pipenv.patched.notpip._internal.utils.subprocess import runner_with_spinner_message +from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pipenv.patched.notpip._internal.build_env import BuildEnvironment + from pipenv.patched.notpip._vendor.pep517.wrappers import Pep517HookCaller + +logger = logging.getLogger(__name__) + + +def generate_metadata(build_env, backend): + # type: (BuildEnvironment, Pep517HookCaller) -> str + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory( + kind="modern-metadata", globally_managed=True + ) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing wheel metadata") + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_wheel( + metadata_dir + ) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/pipenv/patched/notpip/_internal/operations/build/metadata_legacy.py b/pipenv/patched/notpip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 0000000000..9c84151df1 --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,122 @@ +"""Metadata generation logic for legacy source distributions. 
+""" + +import logging +import os + +from pipenv.patched.notpip._internal.exceptions import InstallationError +from pipenv.patched.notpip._internal.utils.misc import ensure_dir +from pipenv.patched.notpip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pipenv.patched.notpip._internal.utils.subprocess import call_subprocess +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + + from pipenv.patched.notpip._internal.build_env import BuildEnvironment + +logger = logging.getLogger(__name__) + + +def _find_egg_info(source_directory, is_editable): + # type: (str, bool) -> str + """Find an .egg-info in `source_directory`, based on `is_editable`. + """ + + def looks_like_virtual_env(path): + # type: (str) -> bool + return ( + os.path.lexists(os.path.join(path, 'bin', 'python')) or + os.path.exists(os.path.join(path, 'Scripts', 'Python.exe')) + ) + + def locate_editable_egg_info(base): + # type: (str) -> List[str] + candidates = [] # type: List[str] + for root, dirs, files in os.walk(base): + for dir_ in vcs.dirnames: + if dir_ in dirs: + dirs.remove(dir_) + # Iterate over a copy of ``dirs``, since mutating + # a list while iterating over it can cause trouble. + # (See https://github.com/pypa/pip/pull/462.) + for dir_ in list(dirs): + if looks_like_virtual_env(os.path.join(root, dir_)): + dirs.remove(dir_) + # Also don't search through tests + elif dir_ == 'test' or dir_ == 'tests': + dirs.remove(dir_) + candidates.extend(os.path.join(root, dir_) for dir_ in dirs) + return [f for f in candidates if f.endswith('.egg-info')] + + def depth_of_directory(dir_): + # type: (str) -> int + return ( + dir_.count(os.path.sep) + + (os.path.altsep and dir_.count(os.path.altsep) or 0) + ) + + base = source_directory + if is_editable: + filenames = locate_editable_egg_info(base) + else: + base = os.path.join(base, 'pip-egg-info') + filenames = os.listdir(base) + + if not filenames: + raise InstallationError( + "Files/directories not found in {}".format(base) + ) + + # If we have more than one match, we pick the toplevel one. This + # can easily be the case if there is a dist folder which contains + # an extracted tarball for testing purposes. + if len(filenames) > 1: + filenames.sort(key=depth_of_directory) + + return os.path.join(base, filenames[0]) + + +def generate_metadata( + build_env, # type: BuildEnvironment + setup_py_path, # type: str + source_dir, # type: str + editable, # type: bool + isolated, # type: bool + details, # type: str +): + # type: (...) -> str + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. + """ + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + setup_py_path, details, + ) + + egg_info_dir = None # type: Optional[str] + # For non-editable installs, don't put the .egg-info files at the root, + # to avoid confusion due to the source code being considered an installed + # egg. + if not editable: + egg_info_dir = os.path.join(source_dir, 'pip-egg-info') + # setuptools complains if the target directory does not exist. + ensure_dir(egg_info_dir) + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + call_subprocess( + args, + cwd=source_dir, + command_desc='python setup.py egg_info', + ) + + # Return the .egg-info directory. 
+ return _find_egg_info(source_dir, editable) diff --git a/pipenv/patched/notpip/_internal/operations/build/wheel.py b/pipenv/patched/notpip/_internal/operations/build/wheel.py new file mode 100644 index 0000000000..39e97d5291 --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/build/wheel.py @@ -0,0 +1,46 @@ +import logging +import os + +from pipenv.patched.notpip._internal.utils.subprocess import runner_with_spinner_message +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + from pipenv.patched.notpip._vendor.pep517.wrappers import Pep517HookCaller + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name, # type: str + backend, # type: Pep517HookCaller + metadata_directory, # type: str + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + if build_options: + # PEP 517 does not support --build-options + logger.error('Cannot build wheel for %s using PEP 517 when ' + '--build-option is present' % (name,)) + return None + try: + logger.debug('Destination directory: %s', tempd) + + runner = runner_with_spinner_message( + 'Building wheel for {} (PEP 517)'.format(name) + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error('Failed building wheel for %s', name) + return None + return os.path.join(tempd, wheel_name) diff --git a/pipenv/patched/notpip/_internal/operations/build/wheel_legacy.py b/pipenv/patched/notpip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 0000000000..0d6183afe0 --- /dev/null +++ b/pipenv/patched/notpip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,115 @@ +import logging +import os.path + +from pipenv.patched.notpip._internal.utils.setuptools_build import ( + make_setuptools_bdist_wheel_args, +) +from pipenv.patched.notpip._internal.utils.subprocess import ( + LOG_DIVIDER, + call_subprocess, + format_command_args, +) +from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING +from pipenv.patched.notpip._internal.utils.ui import open_spinner + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Text + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) -> str + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = 'Command arguments: {}\n'.format(command_desc) + + if not command_output: + text += 'Command output: None' + elif logger.getEffectiveLevel() > logging.DEBUG: + text += 'Command output: [use --verbose to show]' + else: + if not command_output.endswith('\n'): + command_output += '\n' + text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) + + return text + + +def get_legacy_build_wheel_path( + names, # type: List[str] + temp_dir, # type: str + name, # type: str + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) -> Optional[str] + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. 
+ names = sorted(names) + if not names: + msg = ( + 'Legacy build of wheel for {!r} created no files.\n' + ).format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + 'Legacy build of wheel for {!r} created more than one file.\n' + 'Filenames (choosing first): {}\n' + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name, # type: str + setup_py_path, # type: str + source_dir, # type: str + global_options, # type: List[str] + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = 'Building wheel for %s (setup.py)' % (name,) + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + + try: + output = call_subprocess( + wheel_args, + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error('Failed building wheel for %s', name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path
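For context on the `wheel_legacy` module added above: after `setup.py bdist_wheel` runs, pip only knows the contents of the temporary build directory, so `get_legacy_build_wheel_path` sorts the file names for determinism, warns when the build produced zero files or more than one, and joins the first name back onto the temp dir. The following is a condensed, runnable restatement of that selection logic, assuming plain stdlib logging; `pick_wheel` is an illustrative name for this sketch, not part of pip.

import logging
import os.path

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def pick_wheel(names, temp_dir, name):
    # Sort for determinism, mirroring get_legacy_build_wheel_path.
    names = sorted(names)
    if not names:
        logger.warning("Legacy build of wheel for %r created no files.", name)
        return None
    if len(names) > 1:
        logger.warning(
            "Legacy build of wheel for %r created more than one file; "
            "choosing the first of %s.", name, names,
        )
    return os.path.join(temp_dir, names[0])


print(pick_wheel(["pkg-1.0-py2.py3-none-any.whl"], "/tmp/wheelbuild", "pkg"))
# -> /tmp/wheelbuild/pkg-1.0-py2.py3-none-any.whl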