From 320f2169aefc207f711c18fe23a104095e9ad494 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 2 Sep 2018 18:49:57 -0400
Subject: [PATCH 01/35] Vendor passa as zip for running in venvs

Signed-off-by: Dan Ryan
---
 tasks/__init__.py               |  3 ++-
 tasks/vendoring/__init__.py     |  1 -
 tasks/vendoring/vendor_passa.py | 20 ++++++++++++++++++++
 3 files changed, 22 insertions(+), 2 deletions(-)
 create mode 100644 tasks/vendoring/vendor_passa.py

diff --git a/tasks/__init__.py b/tasks/__init__.py
index 581f50762c..a20a64b20f 100644
--- a/tasks/__init__.py
+++ b/tasks/__init__.py
@@ -4,5 +4,6 @@
 import invoke
 
 from . import vendoring, release
+from .vendoring import vendor_passa
 
-ns = invoke.Collection(vendoring, release)
+ns = invoke.Collection(vendoring, release, vendor_passa.vendor_passa)
diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py
index 9e36723bf5..c9ef3b32dc 100644
--- a/tasks/vendoring/__init__.py
+++ b/tasks/vendoring/__init__.py
@@ -306,7 +306,6 @@ def _ensure_package_in_requirements(ctx, requirements_file, package):
     return requirement
 
 
-
 def install(ctx, vendor_dir, package=None):
     requirements_file = vendor_dir / "{0}.txt".format(vendor_dir.name)
     requirement = "-r {0}".format(requirements_file.as_posix())
diff --git a/tasks/vendoring/vendor_passa.py b/tasks/vendoring/vendor_passa.py
new file mode 100644
index 0000000000..ad179a1076
--- /dev/null
+++ b/tasks/vendoring/vendor_passa.py
@@ -0,0 +1,20 @@
+from pipenv._compat import TemporaryDirectory
+import invoke
+
+
+from . import _get_git_root, _get_vendor_dir, log
+
+
+@invoke.task
+def vendor_passa(ctx):
+    with TemporaryDirectory(prefix='passa') as passa_dir:
+        vendor_dir = _get_vendor_dir(ctx).absolute().as_posix()
+        ctx.run("git clone https://github.com/sarugaku/passa.git {0}".format(passa_dir.name))
+        with ctx.cd("{0}".format(passa_dir.name)):
+            # ctx.run("git checkout 0.3.0")
+            ctx.run("pip install plette[validation] requirementslib distlib pip-shims -q --exists-action=i")
+            log("Packing Passa")
+            ctx.run("invoke pack")
+            log("Moving pack to vendor dir!")
+            ctx.run("mv pack/passa.zip {0}".format(vendor_dir))
+            log("Successfully vendored passa!")

From a698fee9bb215743bac3a5cff8623444b71ae39a Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Tue, 4 Sep 2018 23:01:04 -0400
Subject: [PATCH 02/35] Add passa to vendoring task

Signed-off-by: Dan Ryan
---
 tasks/vendoring/__init__.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py
index c9ef3b32dc..1441807957 100644
--- a/tasks/vendoring/__init__.py
+++ b/tasks/vendoring/__init__.py
@@ -627,5 +627,8 @@ def main(ctx, package=None):
     vendor_file = _vendor_dir / 'vendor.txt'
     vendor_file.write_bytes(vendor_src_file.read_bytes())
     download_licenses(ctx, _vendor_dir)
+    from .vendor_passa import vendor_passa
+    log("Vendoring passa...")
+    vendor_passa()
     # update_safety(ctx)
     log('Revendoring complete')

From 1af729f580588e468ff91dd5c50a306241f84863 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Fri, 7 Sep 2018 01:58:24 -0400
Subject: [PATCH 03/35] Vendor passa, update piptools

Signed-off-by: Dan Ryan
---
 pipenv/patched/patched.txt                    |   4 +-
 pipenv/patched/piptools/_compat/pip_compat.py |  55 +-
 pipenv/patched/piptools/cache.py              |   2 +-
 pipenv/patched/piptools/io.py                 |   2 +-
 pipenv/patched/piptools/locations.py          |   9 +-
 pipenv/patched/piptools/repositories/pypi.py  | 293 +++---
 pipenv/patched/piptools/resolver.py           |  22 +-
 pipenv/patched/piptools/utils.py              |  17 +-
 pipenv/vendor/passa/__init__.py               |   7 +
 pipenv/vendor/passa/__main__.py               |   6 +
 pipenv/vendor/passa/_pip.py                   | 315 ++++++
 pipenv/vendor/passa/_pip_shims.py             |  61 ++
 pipenv/vendor/passa/caches.py                 | 214 ++++
 pipenv/vendor/passa/candidates.py             |  81 ++
 pipenv/vendor/passa/cli.py                    | 115 +++
 pipenv/vendor/passa/cli/__init__.py           |  49 +
 pipenv/vendor/passa/cli/_base.py              |  54 ++
 pipenv/vendor/passa/cli/add.py                |  99 ++
 pipenv/vendor/passa/cli/clean.py              |  38 +
 pipenv/vendor/passa/cli/freeze.py             | 138 +++
 pipenv/vendor/passa/cli/install.py            |  63 ++
 pipenv/vendor/passa/cli/lock.py               |  29 +
 pipenv/vendor/passa/cli/remove.py             |  75 ++
 pipenv/vendor/passa/cli/sync.py               |  46 +
 pipenv/vendor/passa/cli/upgrade.py            |  90 ++
 pipenv/vendor/passa/dependencies.py           | 253 +++++
 pipenv/vendor/passa/dependencies_pip.py       | 187 ++++
 pipenv/vendor/passa/hashes.py                 |  61 ++
 pipenv/vendor/passa/internals/__init__.py     |   0
 pipenv/vendor/passa/internals/_pip.py         | 317 ++++++
 pipenv/vendor/passa/internals/_pip_shims.py   |  61 ++
 pipenv/vendor/passa/internals/caches.py       | 214 ++++
 pipenv/vendor/passa/internals/candidates.py   |  81 ++
 pipenv/vendor/passa/internals/dependencies.py | 253 +++++
 pipenv/vendor/passa/internals/hashes.py       |  61 ++
 pipenv/vendor/passa/internals/lockers.py      | 197 ++++
 pipenv/vendor/passa/internals/markers.py      | 101 ++
 pipenv/vendor/passa/internals/metadata.py     | 169 ++++
 pipenv/vendor/passa/internals/providers.py    | 184 ++++
 pipenv/vendor/passa/internals/specifiers.py   | 136 +++
 .../vendor/passa/internals/synchronizers.py   | 214 ++++
 pipenv/vendor/passa/internals/traces.py       |  40 +
 pipenv/vendor/passa/internals/utils.py        | 106 ++
 pipenv/vendor/passa/lockers.py                | 182 ++++
 pipenv/vendor/passa/locking.py                | 105 ++
 pipenv/vendor/passa/markers.py                | 228 +++++
 pipenv/vendor/passa/metadata.py               | 169 ++++
 pipenv/vendor/passa/operations/__init__.py    |   0
 pipenv/vendor/passa/operations/_utils.py      |   0
 pipenv/vendor/passa/operations/lock.py        |  28 +
 pipenv/vendor/passa/operations/sync.py        |  23 +
 pipenv/vendor/passa/projects.py               | 235 +++++
 pipenv/vendor/passa/providers.py              | 167 ++++
 pipenv/vendor/passa/reporters.py              |  90 ++
 pipenv/vendor/passa/reporters/__init__.py     |  31 +
 pipenv/vendor/passa/reporters/base.py         |  52 +
 pipenv/vendor/passa/reporters/stdout.py       | 106 ++
 pipenv/vendor/passa/synchronizers.py          | 211 ++++
 pipenv/vendor/passa/traces.py                 |  40 +
 pipenv/vendor/passa/utils.py                  |  97 ++
 pipenv/vendor/passa/vcs.py                    |  37 +
 pipenv/vendor/yaspin/__init__.py              |  10 +
 pipenv/vendor/yaspin/__version__.py           |   1 +
 pipenv/vendor/yaspin/api.py                   |  88 ++
 pipenv/vendor/yaspin/base_spinner.py          |  16 +
 pipenv/vendor/yaspin/compat.py                |  33 +
 pipenv/vendor/yaspin/constants.py             | 110 +++
 pipenv/vendor/yaspin/core.py                  | 534 ++++++++++
 pipenv/vendor/yaspin/data/spinners.json       | 912 ++++++++++++++++++
 pipenv/vendor/yaspin/helpers.py               |  19 +
 pipenv/vendor/yaspin/signal_handlers.py       |  35 +
 pipenv/vendor/yaspin/spinners.py              |  29 +
 pipenv/vendor/yaspin/termcolor.py             | 168 ++++
 .../vendoring/patches/patched/piptools.patch  | 308 +++---
 74 files changed, 8221 insertions(+), 362 deletions(-)
 create mode 100644 pipenv/vendor/passa/__init__.py
 create mode 100644 pipenv/vendor/passa/__main__.py
 create mode 100644 pipenv/vendor/passa/_pip.py
 create mode 100644 pipenv/vendor/passa/_pip_shims.py
 create mode 100644 pipenv/vendor/passa/caches.py
 create mode 100644 pipenv/vendor/passa/candidates.py
 create mode 100644 pipenv/vendor/passa/cli.py
 create mode 100644 pipenv/vendor/passa/cli/__init__.py
 create mode 100644 pipenv/vendor/passa/cli/_base.py
 create mode 100644 pipenv/vendor/passa/cli/add.py
 create mode 100644 pipenv/vendor/passa/cli/clean.py
 create mode 100644 pipenv/vendor/passa/cli/freeze.py
 create mode 100644 pipenv/vendor/passa/cli/install.py
 create mode 100644 pipenv/vendor/passa/cli/lock.py
 create mode 100644 pipenv/vendor/passa/cli/remove.py
 create mode 100644 pipenv/vendor/passa/cli/sync.py
 create mode 100644 pipenv/vendor/passa/cli/upgrade.py
 create mode 100644 pipenv/vendor/passa/dependencies.py
 create mode 100644 pipenv/vendor/passa/dependencies_pip.py
 create mode 100644 pipenv/vendor/passa/hashes.py
 create mode 100644 pipenv/vendor/passa/internals/__init__.py
 create mode 100644 pipenv/vendor/passa/internals/_pip.py
 create mode 100644 pipenv/vendor/passa/internals/_pip_shims.py
 create mode 100644 pipenv/vendor/passa/internals/caches.py
 create mode 100644 pipenv/vendor/passa/internals/candidates.py
 create mode 100644 pipenv/vendor/passa/internals/dependencies.py
 create mode 100644 pipenv/vendor/passa/internals/hashes.py
 create mode 100644 pipenv/vendor/passa/internals/lockers.py
 create mode 100644 pipenv/vendor/passa/internals/markers.py
 create mode 100644 pipenv/vendor/passa/internals/metadata.py
 create mode 100644 pipenv/vendor/passa/internals/providers.py
 create mode 100644 pipenv/vendor/passa/internals/specifiers.py
 create mode 100644 pipenv/vendor/passa/internals/synchronizers.py
 create mode 100644 pipenv/vendor/passa/internals/traces.py
 create mode 100644 pipenv/vendor/passa/internals/utils.py
 create mode 100644 pipenv/vendor/passa/lockers.py
 create mode 100644 pipenv/vendor/passa/locking.py
 create mode 100644 pipenv/vendor/passa/markers.py
 create mode 100644 pipenv/vendor/passa/metadata.py
 create mode 100644 pipenv/vendor/passa/operations/__init__.py
 create mode 100644 pipenv/vendor/passa/operations/_utils.py
 create mode 100644 pipenv/vendor/passa/operations/lock.py
 create mode 100644 pipenv/vendor/passa/operations/sync.py
 create mode 100644 pipenv/vendor/passa/projects.py
 create mode 100644 pipenv/vendor/passa/providers.py
 create mode 100644 pipenv/vendor/passa/reporters.py
 create mode 100644 pipenv/vendor/passa/reporters/__init__.py
 create mode 100644 pipenv/vendor/passa/reporters/base.py
 create mode 100644 pipenv/vendor/passa/reporters/stdout.py
 create mode 100644 pipenv/vendor/passa/synchronizers.py
 create mode 100644 pipenv/vendor/passa/traces.py
 create mode 100644 pipenv/vendor/passa/utils.py
 create mode 100644 pipenv/vendor/passa/vcs.py
 create mode 100644 pipenv/vendor/yaspin/__init__.py
 create mode 100644 pipenv/vendor/yaspin/__version__.py
 create mode 100644 pipenv/vendor/yaspin/api.py
 create mode 100644 pipenv/vendor/yaspin/base_spinner.py
 create mode 100644 pipenv/vendor/yaspin/compat.py
 create mode 100644 pipenv/vendor/yaspin/constants.py
 create mode 100644 pipenv/vendor/yaspin/core.py
 create mode 100644 pipenv/vendor/yaspin/data/spinners.json
 create mode 100644 pipenv/vendor/yaspin/helpers.py
 create mode 100644 pipenv/vendor/yaspin/signal_handlers.py
 create mode 100644 pipenv/vendor/yaspin/spinners.py
 create mode 100644 pipenv/vendor/yaspin/termcolor.py

diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt
index f8bdb9e102..d12555deaf 100644
--- a/pipenv/patched/patched.txt
+++ b/pipenv/patched/patched.txt
@@ -2,6 +2,6 @@ safety
 git+https://github.com/jumpscale7/python-consistent-toml.git#egg=contoml
 crayons==0.1.2
 pipfile==0.0.2
-git+https://github.com/jazzband/pip-tools.git@9cb41d828fcb0967a32cc140c1dcaca94e5f4daa#egg=piptools
+git+https://github.com/jazzband/pip-tools.git@19a3b1f11d941b01209bb4fad4a2a16d15f67171#egg=piptools
 prettytoml==0.3
-pip==10.0.1
+pip==18.0
diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py
index 0a0d27dca8..3ea0826708 100644
--- a/pipenv/patched/piptools/_compat/pip_compat.py
+++ b/pipenv/patched/piptools/_compat/pip_compat.py
@@ -1,42 +1,37 @@
 # -*- coding=utf-8 -*-
 import importlib
 
-
 def do_import(module_path, subimport=None, old_path=None, vendored_name=None):
-    internal = 'pip._internal.{0}'.format(module_path)
     old_path = old_path or module_path
-    pip9 = 'pip.{0}'.format(old_path)
-    _tmp = None
-    if vendored_name:
-        vendor = '{0}._internal'.format(vendored_name)
-        vendor = '{0}.{1}'.format(vendor, old_path if old_path else module_path)
-        try:
-            _tmp = importlib.import_module(vendor)
-        except ImportError:
-            pass
-    if not _tmp:
+    prefix = vendored_name if vendored_name else "pip"
+    prefixes = ["{0}._internal".format(prefix), "{0}".format(prefix)]
+    paths = [module_path, old_path]
+    search_order = ["{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None]
+    package = subimport if subimport else None
+    for to_import in search_order:
+        if not subimport:
+            to_import, _, package = to_import.rpartition(".")
         try:
-            _tmp = importlib.import_module(internal)
+            imported = importlib.import_module(to_import)
         except ImportError:
-            _tmp = importlib.import_module(pip9)
-    if subimport:
-        return getattr(_tmp, subimport, _tmp)
-    return _tmp
+            continue
+        else:
+            return getattr(imported, package)
 
-InstallRequirement = do_import('req.req_install', 'InstallRequirement', vendored_name='notpip')
-parse_requirements = do_import('req.req_file', 'parse_requirements', vendored_name='notpip')
-RequirementSet = do_import('req.req_set', 'RequirementSet', vendored_name='notpip')
-user_cache_dir = do_import('utils.appdirs', 'user_cache_dir', vendored_name='notpip')
-FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH', vendored_name='notpip')
-is_file_url = do_import('download', 'is_file_url', vendored_name='notpip')
-url_to_path = do_import('download', 'url_to_path', vendored_name='notpip')
-PackageFinder = do_import('index', 'PackageFinder', vendored_name='notpip')
-FormatControl = do_import('index', 'FormatControl', vendored_name='notpip')
-Wheel = do_import('wheel', 'Wheel', vendored_name='notpip')
-Command = do_import('basecommand', 'Command', vendored_name='notpip')
-cmdoptions = do_import('cmdoptions', vendored_name='notpip')
-get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils', vendored_name='notpip')
+InstallRequirement = do_import('req.req_install', 'InstallRequirement', vendored_name="notpip")
+parse_requirements = do_import('req.req_file', 'parse_requirements', vendored_name="notpip")
+RequirementSet = do_import('req.req_set', 'RequirementSet', vendored_name="notpip")
+user_cache_dir = do_import('utils.appdirs', 'user_cache_dir', vendored_name="notpip")
+FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH', vendored_name="notpip")
+is_file_url = do_import('download', 'is_file_url', vendored_name="notpip")
+url_to_path = do_import('download', 'url_to_path', vendored_name="notpip")
+PackageFinder = do_import('index', 'PackageFinder', vendored_name="notpip")
+FormatControl = do_import('index', 'FormatControl', vendored_name="notpip")
+Wheel = do_import('wheel', 'Wheel', vendored_name="notpip")
+Command = do_import('cli.base_command', 'Command', old_path='basecommand', vendored_name="notpip")
+cmdoptions = do_import('cli.cmdoptions', old_path='cmdoptions', vendored_name="notpip")
+get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils', vendored_name="notpip")
 PyPI = do_import('models.index', 'PyPI', vendored_name='notpip')
 SafeFileCache = do_import('download', 'SafeFileCache', vendored_name='notpip')
 InstallationError = do_import('exceptions', 'InstallationError', vendored_name='notpip')
diff --git a/pipenv/patched/piptools/cache.py b/pipenv/patched/piptools/cache.py
index 610a4f37d1..7595b964c2 100644
--- a/pipenv/patched/piptools/cache.py
+++ b/pipenv/patched/piptools/cache.py
@@ -6,7 +6,7 @@
 import os
 import sys
 
-from pipenv.patched.notpip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.requirements import Requirement
 
 from .exceptions import PipToolsError
 from .locations import CACHE_DIR
diff --git a/pipenv/patched/piptools/io.py b/pipenv/patched/piptools/io.py
index e9526832f4..b6bca6756b 100644
--- a/pipenv/patched/piptools/io.py
+++ b/pipenv/patched/piptools/io.py
@@ -347,7 +347,7 @@ class AtomicSaver(object):
       writer at a time.
     * Optional recovery of partial data in failure cases.
 
-    .. _context manager: https://docs.python.org/2/reference/compound_stmts.html#with
+    .. _context manager: https://docs.python.org/3/reference/compound_stmts.html#with
     .. _umask: https://en.wikipedia.org/wiki/Umask
 
     """
diff --git a/pipenv/patched/piptools/locations.py b/pipenv/patched/piptools/locations.py
index fbc820ab5a..4e6174c550 100644
--- a/pipenv/patched/piptools/locations.py
+++ b/pipenv/patched/piptools/locations.py
@@ -2,13 +2,10 @@
 from shutil import rmtree
 
 from .click import secho
-# Patch by vphilippon 2017-11-22: Use pipenv cache path.
-# from ._compat import user_cache_dir
-from pipenv.environments import PIPENV_CACHE_DIR
+from ._compat import user_cache_dir
 
-# The user_cache_dir helper comes straight from pipenv.patched.notpip itself
-# CACHE_DIR = user_cache_dir(os.path.join('pip-tools'))
-CACHE_DIR = PIPENV_CACHE_DIR
+# The user_cache_dir helper comes straight from pip itself
+CACHE_DIR = user_cache_dir('pip-tools')
 
 # NOTE
 # We used to store the cache dir under ~/.pip-tools, which is not the
diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py
index 9e74156055..eb20560d94 100644
--- a/pipenv/patched/piptools/repositories/pypi.py
+++ b/pipenv/patched/piptools/repositories/pypi.py
@@ -4,7 +4,6 @@
 import copy
 import hashlib
 import os
-import sys
 from contextlib import contextmanager
 from shutil import rmtree
 
@@ -18,33 +17,29 @@
     TemporaryDirectory,
     PyPI,
     InstallRequirement,
-    SafeFileCache,
+    SafeFileCache
 )
+os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip"
+from pip_shims.shims import pip_import, VcsSupport, WheelCache
+from packaging.requirements import Requirement
+from packaging.specifiers import SpecifierSet, Specifier
+from packaging.markers import Op, Value, Variable, Marker
+InstallationError = pip_import("InstallationError", "exceptions.InstallationError", "7.0", "9999")
+from notpip._internal.resolve import Resolver as PipResolver
 
-from pipenv.patched.notpip._vendor.packaging.requirements import Requirement
-from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet, Specifier
-from pipenv.patched.notpip._vendor.packaging.markers import Op, Value, Variable
-from pipenv.patched.notpip._internal.exceptions import InstallationError
-from pipenv.patched.notpip._internal.vcs import VcsSupport
-from pipenv.environments import PIPENV_CACHE_DIR
+from pipenv.environments import PIPENV_CACHE_DIR as CACHE_DIR
 
 from ..exceptions import NoCandidateFound
-from ..utils import (fs_str, is_pinned_requirement, lookup_table,
+from ..utils import (fs_str, is_pinned_requirement, lookup_table, dedup,
                      make_install_requirement, clean_requires_python)
-
 from .base import BaseRepository
-
-try:
-    from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer
-    from pipenv.patched.notpip._internal.resolve import Resolver as PipResolver
-except ImportError:
-    pass
-
 try:
-    from pipenv.patched.notpip._internal.cache import WheelCache
+    from notpip._internal.req.req_tracker import RequirementTracker
 except ImportError:
-    from pipenv.patched.notpip.wheel import WheelCache
+    @contextmanager
+    def RequirementTracker():
+        yield
 
 
 class HashCache(SafeFileCache):
@@ -56,7 +51,7 @@ class HashCache(SafeFileCache):
     def __init__(self, *args, **kwargs):
         session = kwargs.pop('session')
         self.session = session
-        kwargs.setdefault('directory', os.path.join(PIPENV_CACHE_DIR, 'hash-cache'))
+        kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
         super(HashCache, self).__init__(*args, **kwargs)
 
     def get_hash(self, location):
@@ -99,7 +94,6 @@ def __init__(self, pip_options, session, use_json=False):
         self.session = session
         self.use_json = use_json
         self.pip_options = pip_options
-        self.wheel_cache = WheelCache(PIPENV_CACHE_DIR, pip_options.format_control)
 
         index_urls = [pip_options.index_url] + pip_options.extra_index_urls
         if pip_options.no_index:
@@ -131,8 +125,8 @@ def __init__(self, pip_options, session, use_json=False):
 
         # Setup file paths
         self.freshen_build_caches()
-        self._download_dir = fs_str(os.path.join(PIPENV_CACHE_DIR, 'pkgs'))
-        self._wheel_download_dir = fs_str(os.path.join(PIPENV_CACHE_DIR, 'wheels'))
+        self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs'))
+        self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels'))
 
     def freshen_build_caches(self):
         """
@@ -169,7 +163,6 @@ def find_best_match(self, ireq, prereleases=None):
             return ireq  # return itself as the best match
 
         all_candidates = clean_requires_python(self.find_all_candidates(ireq.name))
-        candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True)
         try:
             matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates),
@@ -184,14 +177,10 @@ def find_best_match(self, ireq, prereleases=None):
         best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key)
 
         # Turn the candidate into a pinned InstallRequirement
-        new_req = make_install_requirement(
-            best_candidate.project, best_candidate.version, ireq.extras, ireq.markers, constraint=ireq.constraint
+        return make_install_requirement(
+            best_candidate.project, best_candidate.version, ireq.extras, ireq.markers, constraint=ireq.constraint
         )
 
-        # KR TODO: Marker here?
-
-        return new_req
-
     def get_json_dependencies(self, ireq):
 
         if not (is_pinned_requirement(ireq)):
@@ -248,116 +237,86 @@ def get_dependencies(self, ireq):
 
         return json_results
 
-    def get_legacy_dependencies(self, ireq):
-        """
-        Given a pinned or an editable InstallRequirement, returns a set of
-        dependencies (also InstallRequirements, but not necessarily pinned).
-        They indicate the secondary dependencies for the given requirement.
-        """
-        if not (ireq.editable or is_pinned_requirement(ireq)):
-            raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq))
+    def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist=None):
+        results = None
+        setup_requires = {}
+        dist = None
+        try:
+            from notpip._internal.operations.prepare import RequirementPreparer
+        except ImportError:
+            # Pip 9 and below
+            reqset = RequirementSet(
+                self.build_dir,
+                self.source_dir,
+                download_dir=download_dir,
+                wheel_download_dir=self._wheel_download_dir,
+                session=self.session,
+                ignore_installed=True,
+                ignore_compatibility=False,
+                wheel_cache=wheel_cache
+            )
+            results = reqset._prepare_file(self.finder, ireq, ignore_requires_python=True)
+        else:
+            # pip >= 10
+            preparer_kwargs = {
+                'build_dir': self.build_dir,
+                'src_dir': self.source_dir,
+                'download_dir': download_dir,
+                'wheel_download_dir': self._wheel_download_dir,
+                'progress_bar': 'off',
+                'build_isolation': True
+            }
+            resolver_kwargs = {
+                'finder': self.finder,
+                'session': self.session,
+                'upgrade_strategy': "to-satisfy-only",
+                'force_reinstall': True,
+                'ignore_dependencies': False,
+                'ignore_requires_python': True,
+                'ignore_installed': True,
+                'isolated': False,
+                'wheel_cache': wheel_cache,
+                'use_user_site': False,
+                'ignore_compatibility': True
+            }
+            resolver = None
+            preparer = None
+            with RequirementTracker() as req_tracker:
+                # Pip 18 uses a requirement tracker to prevent fork bombs
+                if req_tracker:
+                    preparer_kwargs['req_tracker'] = req_tracker
+                preparer = RequirementPreparer(**preparer_kwargs)
+                resolver_kwargs['preparer'] = preparer
+                reqset = RequirementSet()
+                ireq.is_direct = True
+                # reqset.add_requirement(ireq)
+                resolver = PipResolver(**resolver_kwargs)
+                resolver.require_hashes = False
+                results = resolver._resolve_one(reqset, ireq)
+                reqset.cleanup_files()
 
-        if ireq not in self._dependencies_cache:
-            if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
-                # No download_dir for locally available editable requirements.
-                # If a download_dir is passed, pip will unnecessarely
-                # archive the entire source directory
-                download_dir = None
-            elif ireq.link and not ireq.link.is_artifact:
-                # No download_dir for VCS sources. This also works around pip
-                # using git-checkout-index, which gets rid of the .git dir.
-                download_dir = None
-            else:
-                download_dir = self._download_dir
-                if not os.path.isdir(download_dir):
-                    os.makedirs(download_dir)
-            if not os.path.isdir(self._wheel_download_dir):
-                os.makedirs(self._wheel_download_dir)
+        if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
             # Collect setup_requires info from local eggs.
            # Do this after we call the preparer on these reqs to make sure their
            # egg info has been created
-            setup_requires = {}
-            dist = None
-            if ireq.editable:
-                try:
-                    from pipenv.utils import chdir
-                    with chdir(ireq.setup_py_dir):
-                        from setuptools.dist import distutils
-                        dist = distutils.core.run_setup(ireq.setup_py)
-                except (ImportError, InstallationError, TypeError, AttributeError):
-                    pass
+            from pipenv.utils import chdir
+            with chdir(ireq.setup_py_dir):
                 try:
-                    dist = ireq.get_dist() if not dist else dist
+                    from setuptools.dist import distutils
+                    dist = distutils.core.run_setup(ireq.setup_py)
                 except InstallationError:
                     ireq.run_egg_info()
-                    dist = ireq.get_dist()
                 except (TypeError, ValueError, AttributeError):
                     pass
-                else:
-                    setup_requires = getattr(dist, "extras_require", None)
-                    if not setup_requires:
-                        setup_requires = {"setup_requires": getattr(dist, "setup_requires", None)}
-            try:
-                # Pip 9 and below
-                reqset = RequirementSet(
-                    self.build_dir,
-                    self.source_dir,
-                    download_dir=download_dir,
-                    wheel_download_dir=self._wheel_download_dir,
-                    session=self.session,
-                    ignore_installed=True,
-                    ignore_compatibility=False,
-                    wheel_cache=self.wheel_cache,
-                )
-                result = reqset._prepare_file(
-                    self.finder,
-                    ireq,
-                    ignore_requires_python=True
-                )
-            except TypeError:
-                # Pip >= 10 (new resolver!)
-                preparer = RequirementPreparer(
-                    build_dir=self.build_dir,
-                    src_dir=self.source_dir,
-                    download_dir=download_dir,
-                    wheel_download_dir=self._wheel_download_dir,
-                    progress_bar='off',
-                    build_isolation=False
-                )
-                reqset = RequirementSet()
-                ireq.is_direct = True
-                reqset.add_requirement(ireq)
-                self.resolver = PipResolver(
-                    preparer=preparer,
-                    finder=self.finder,
-                    session=self.session,
-                    upgrade_strategy="to-satisfy-only",
-                    force_reinstall=True,
-                    ignore_dependencies=False,
-                    ignore_requires_python=True,
-                    ignore_installed=True,
-                    isolated=False,
-                    wheel_cache=self.wheel_cache,
-                    use_user_site=False,
-                    ignore_compatibility=False
-                )
-                self.resolver.resolve(reqset)
-                result = set(reqset.requirements.values())
-
-            # HACK: Sometimes the InstallRequirement doesn't properly get
-            # these values set on it during the resolution process. It's
-            # difficult to pin down what is going wrong. This fixes things.
-            if not getattr(ireq, 'version', None):
-                try:
-                    dist = ireq.get_dist() if not dist else None
-                    ireq.version = ireq.get_dist().version
-                except (ValueError, OSError, TypeError, AttributeError) as e:
-                    pass
-            if not getattr(ireq, 'project_name', None):
-                try:
-                    ireq.project_name = dist.project_name if dist else None
-                except (ValueError, TypeError) as e:
-                    pass
+        if not dist:
+            try:
+                dist = ireq.get_dist()
+            except (ImportError, ValueError, TypeError, AttributeError):
+                pass
+        if ireq.editable and dist:
+            setup_requires = getattr(dist, "extras_require", None)
+            if not setup_requires:
+                setup_requires = {"setup_requires": getattr(dist, "setup_requires", None)}
         if not getattr(ireq, 'req', None):
             try:
                 ireq.req = dist.as_requirement() if dist else None
@@ -385,14 +344,14 @@ def get_legacy_dependencies(self, ireq):
                     if ':' not in value:
                         try:
                             if not not_python:
-                                result = result + [InstallRequirement.from_line("{0}{1}".format(value, python_version).replace(':', ';'))]
+                                results.add(InstallRequirement.from_line("{0}{1}".format(value, python_version).replace(':', ';')))
                         # Anything could go wrong here -- can't be too careful.
                         except Exception:
                             pass
 
         # this section properly creates 'python_version' markers for cross-python
         # virtualenv creation and for multi-python compatibility.
-        requires_python = reqset.requires_python if hasattr(reqset, 'requires_python') else self.resolver.requires_python
+        requires_python = reqset.requires_python if hasattr(reqset, 'requires_python') else resolver.requires_python
         if requires_python:
             marker_str = ''
             # This corrects a logic error from the previous code which said that if
@@ -402,28 +361,68 @@ def get_legacy_dependencies(self, ireq):
             if any(requires_python.startswith(op) for op in Specifier._operators.keys()):
                 # We are checking first if we have a leading specifier operator
                 # if not, we can assume we should be doing a == comparison
-                specifierset = list(SpecifierSet(requires_python))
+                specifierset = SpecifierSet(requires_python)
                 # for multiple specifiers, the correct way to represent that in
                 # a specifierset is `Requirement('fakepkg; python_version<"3.0,>=2.6"')`
-                marker_key = Variable('python_version')
-                markers = []
-                for spec in specifierset:
-                    operator, val = spec._spec
-                    operator = Op(operator)
-                    val = Value(val)
-                    markers.append(''.join([marker_key.serialize(), operator.serialize(), val.serialize()]))
-                marker_str = ' and '.join(markers)
+                from passa.internals.specifiers import cleanup_pyspecs
+                marker_str = str(Marker(" and ".join(dedup([
+                    "python_version {0[0]} '{0[1]}'".format(spec)
+                    for spec in cleanup_pyspecs(specifierset)
+                ]))))
            # The best way to add markers to a requirement is to make a separate requirement
            # with only markers on it, and then to transfer the object itself
            marker_to_add = Requirement('fakepkg; {0}'.format(marker_str)).marker
-            result.remove(ireq)
+            if ireq in results:
+                results.remove(ireq)
+            print(marker_to_add)
             ireq.req.marker = marker_to_add
-            result.add(ireq)
-        self._dependencies_cache[ireq] = result
-        reqset.cleanup_files()
+        results = set(results) if results else set()
+        return results, ireq
+
+    def get_legacy_dependencies(self, ireq):
+        """
+        Given a pinned or an editable InstallRequirement, returns a set of
+        dependencies (also InstallRequirements, but not necessarily pinned).
+        They indicate the secondary dependencies for the given requirement.
+        """
+        if not (ireq.editable or is_pinned_requirement(ireq)):
+            raise TypeError('Expected pinned or editable InstallRequirement, got {}'.format(ireq))
+
+        if ireq not in self._dependencies_cache:
+            if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
+                # No download_dir for locally available editable requirements.
+                # If a download_dir is passed, pip will unnecessarily
+                # archive the entire source directory
+                download_dir = None
+
+            elif ireq.link and not ireq.link.is_artifact:
+                # No download_dir for VCS sources. This also works around pip
+                # using git-checkout-index, which gets rid of the .git dir.
+                download_dir = None
+            else:
+                download_dir = self._download_dir
+                if not os.path.isdir(download_dir):
+                    os.makedirs(download_dir)
+            if not os.path.isdir(self._wheel_download_dir):
+                os.makedirs(self._wheel_download_dir)
 
-        return set(self._dependencies_cache[ireq])
+            wheel_cache = WheelCache(CACHE_DIR, self.pip_options.format_control)
+            prev_tracker = os.environ.get('PIP_REQ_TRACKER')
+            try:
+                results, ireq = self.resolve_reqs(download_dir, ireq, wheel_cache)
+                self._dependencies_cache[ireq] = results
+            finally:
+                if 'PIP_REQ_TRACKER' in os.environ:
+                    if prev_tracker:
+                        os.environ['PIP_REQ_TRACKER'] = prev_tracker
+                    else:
+                        del os.environ['PIP_REQ_TRACKER']
+                try:
+                    self.wheel_cache.cleanup()
+                except AttributeError:
+                    pass
+        return self._dependencies_cache[ireq]
 
     def get_hashes(self, ireq):
         """
diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py
index 807cf518b0..d5a471d443 100644
--- a/pipenv/patched/piptools/resolver.py
+++ b/pipenv/patched/piptools/resolver.py
@@ -7,8 +7,6 @@
 from itertools import chain, count
 import os
 
-from first import first
-from pipenv.patched.notpip._vendor.packaging.markers import default_environment
 from ._compat import InstallRequirement
 
 from . import click
@@ -73,7 +71,7 @@ def resolve_hashes(self, ireqs):
         with self.repository.allow_all_wheels():
             return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
 
-    def resolve(self, max_rounds=12):
+    def resolve(self, max_rounds=10):
         """
         Finds concrete package versions for all the given InstallRequirements
         and their recursive dependencies.  The end result is a flat list of
@@ -145,20 +143,18 @@ def _group_constraints(self, constraints):
         """
         for _, ireqs in full_groupby(constraints, key=key_from_ireq):
             ireqs = list(ireqs)
-            editable_ireq = first(ireqs, key=lambda ireq: ireq.editable)
+            editable_ireq = next((ireq for ireq in ireqs if ireq.editable), None)
             if editable_ireq:
                 yield editable_ireq  # ignore all the other specs: the editable one is the one that counts
                 continue
+            ireqs = iter(ireqs)
             # deepcopy the accumulator so as to not modify the self.our_constraints invariant
             combined_ireq = copy.deepcopy(next(ireqs))
+            combined_ireq.comes_from = None
             for ireq in ireqs:
                 # NOTE we may be losing some info on dropped reqs here
-                try:
-                    combined_ireq.req.specifier &= ireq.req.specifier
-                except TypeError:
-                    if ireq.req.specifier._specs and not combined_ireq.req.specifier._specs:
-                        combined_ireq.req.specifier._specs = ireq.req.specifier._specs
+                combined_ireq.req.specifier &= ireq.req.specifier
                 combined_ireq.constraint &= ireq.constraint
                 if not combined_ireq.markers:
                     combined_ireq.markers = ireq.markers
@@ -166,6 +162,7 @@ def _group_constraints(self, constraints):
                     _markers = combined_ireq.markers._markers
                     if not isinstance(_markers[0], (tuple, list)):
                         combined_ireq.markers._markers = [_markers, 'and', ireq.markers._markers]
+            # Return a sorted, de-duped tuple of extras
             combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))))
             yield combined_ireq
@@ -286,11 +283,12 @@ def _iter_dependencies(self, ireq):
 
         # fix our malformed extras
         if ireq.extras:
-            if hasattr(ireq, 'extra'):
+            if getattr(ireq, "extra", None):
                 if ireq.extras:
                     ireq.extras.extend(ireq.extra)
                 else:
                     ireq.extras = ireq.extra
+
         elif not is_pinned_requirement(ireq):
             raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq))
 
@@ -301,14 +299,14 @@ def _iter_dependencies(self, ireq):
         if ireq not in self.dependency_cache:
             log.debug('  {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow')
             dependencies = self.repository.get_dependencies(ireq)
-            self.dependency_cache[ireq] = sorted(format_requirement(_ireq) for _ireq in dependencies)
+            self.dependency_cache[ireq] = sorted(set(format_requirement(ireq) for ireq in dependencies))
 
         # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
         dependency_strings = self.dependency_cache[ireq]
         log.debug('  {:25} requires {}'.format(format_requirement(ireq),
                                                ', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
         for dependency_string in dependency_strings:
-            yield InstallRequirement.from_line(dependency_string, constraint=ireq.constraint)
+            yield InstallRequirement.from_line(dependency_string, constraint=ireq.constraint)
 
     def reverse_dependencies(self, ireqs):
         non_editable = [ireq for ireq in ireqs if not ireq.editable]
diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py
index 2f389eecaf..6225d7e21a 100644
--- a/pipenv/patched/piptools/utils.py
+++ b/pipenv/patched/piptools/utils.py
@@ -2,20 +2,19 @@
 from __future__ import (absolute_import, division, print_function,
                         unicode_literals)
 
-import six
 import os
 import sys
+import six
 from itertools import chain, groupby
 from collections import OrderedDict
 from contextlib import contextmanager
 
 from ._compat import InstallRequirement
 
-from first import first
-from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier
-from pipenv.patched.notpip._vendor.packaging.version import Version, InvalidVersion, parse as parse_version
-from pipenv.patched.notpip._vendor.packaging.markers import Marker, Op, Value, Variable
 from .click import style
+from pip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier
+from pip._vendor.packaging.version import Version, InvalidVersion, parse as parse_version
+from pip._vendor.packaging.markers import Marker, Op, Value, Variable
 
 UNSAFE_PACKAGES = {'setuptools', 'distribute', 'pip'}
@@ -158,10 +157,6 @@ def _requirement_to_str_lowercase_name(requirement):
 
 
 def format_requirement(ireq, marker=None):
-    """
-    Generic formatter for pretty printing InstallRequirements to the terminal
-    in a less verbose way than using its `__str__` method.
-    """
     if ireq.editable:
         line = '-e {}'.format(ireq.link)
     else:
@@ -207,7 +202,7 @@ def is_pinned_requirement(ireq):
     if len(ireq.specifier._specs) != 1:
         return False
 
-    op, version = first(ireq.specifier._specs)._spec
+    op, version = next(iter(ireq.specifier._specs))._spec
     return (op == '==' or op == '===') and not version.endswith('.*')
 
@@ -219,7 +214,7 @@ def as_tuple(ireq):
         raise TypeError('Expected a pinned InstallRequirement, got {}'.format(ireq))
 
     name = key_from_req(ireq.req)
-    version = first(ireq.specifier._specs)._spec[1]
+    version = next(iter(ireq.specifier._specs))._spec[1]
     extras = tuple(sorted(ireq.extras))
     return name, version, extras
diff --git a/pipenv/vendor/passa/__init__.py b/pipenv/vendor/passa/__init__.py
new file mode 100644
index 0000000000..6f92267d8a
--- /dev/null
+++ b/pipenv/vendor/passa/__init__.py
@@ -0,0 +1,7 @@
+# -*- coding=utf-8 -*-
+
+__all__ = [
+    '__version__'
+]
+
+__version__ = '0.3.0'
diff --git a/pipenv/vendor/passa/__main__.py b/pipenv/vendor/passa/__main__.py
new file mode 100644
index 0000000000..76c2e6a624
--- /dev/null
+++ b/pipenv/vendor/passa/__main__.py
@@ -0,0 +1,6 @@
+# -*- coding=utf-8 -*-
+
+from .cli import main
+
+if __name__ == '__main__':
+    main()
diff --git a/pipenv/vendor/passa/_pip.py b/pipenv/vendor/passa/_pip.py
new file mode 100644
index 0000000000..5cf1cea8a9
--- /dev/null
+++ b/pipenv/vendor/passa/_pip.py
@@ -0,0 +1,315 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import contextlib
+import distutils.log
+import os
+
+import setuptools.dist
+
+import distlib.scripts
+import distlib.wheel
+import pip_shims
+import six
+import vistir
+
+from ._pip_shims import VCS_SUPPORT, build_wheel as _build_wheel, unpack_url
+from .caches import CACHE_DIR
+from .utils import filter_sources
+
+
+@vistir.path.ensure_mkdir_p(mode=0o775)
+def _get_src_dir():
+    src = os.environ.get("PIP_SRC")
+    if src:
+        return src
+    virtual_env = os.environ.get("VIRTUAL_ENV")
+    if virtual_env:
+        return os.path.join(virtual_env, "src")
+    temp_src = vistir.path.create_tracked_tempdir(prefix='passa-src')
+    return temp_src
+
+
+def _prepare_wheel_building_kwargs(ireq):
+    download_dir = os.path.join(CACHE_DIR, "pkgs")
+    vistir.mkdir_p(download_dir)
+
+    wheel_download_dir = os.path.join(CACHE_DIR, "wheels")
+    vistir.mkdir_p(wheel_download_dir)
+
+    if ireq.source_dir is None:
+        src_dir = _get_src_dir()
+    else:
+        src_dir = ireq.source_dir
+
+    # This logic matches pip's behavior, although I don't fully understand the
+    # intention. I guess the idea is to build editables in-place, otherwise out
+    # of the source tree?
+    if ireq.editable:
+        build_dir = src_dir
+    else:
+        build_dir = vistir.path.create_tracked_tempdir(prefix="passa-build")
+
+    return {
+        "build_dir": build_dir,
+        "src_dir": src_dir,
+        "download_dir": download_dir,
+        "wheel_download_dir": wheel_download_dir,
+    }
+
+
+def _get_pip_index_urls(sources):
+    index_urls = []
+    trusted_hosts = []
+    for source in sources:
+        url = source.get("url")
+        if not url:
+            continue
+        index_urls.append(url)
+        if source.get("verify_ssl", True):
+            continue
+        host = six.moves.urllib.parse.urlparse(source["url"]).hostname
+        trusted_hosts.append(host)
+    return index_urls, trusted_hosts
+
+
+class _PipCommand(pip_shims.Command):
+    name = "PipCommand"
+
+
+def _get_pip_session(trusted_hosts):
+    cmd = _PipCommand()
+    options, _ = cmd.parser.parse_args([])
+    options.cache_dir = CACHE_DIR
+    options.trusted_hosts = trusted_hosts
+    session = cmd._build_session(options)
+    return session
+
+
+def _get_finder(sources):
+    index_urls, trusted_hosts = _get_pip_index_urls(sources)
+    session = _get_pip_session(trusted_hosts)
+    finder = pip_shims.PackageFinder(
+        find_links=[],
+        index_urls=index_urls,
+        trusted_hosts=trusted_hosts,
+        allow_all_prereleases=True,
+        session=session,
+    )
+    return finder
+
+
+def _get_wheel_cache():
+    format_control = pip_shims.FormatControl(set(), set())
+    wheel_cache = pip_shims.WheelCache(CACHE_DIR, format_control)
+    return wheel_cache
+
+
+def _convert_hashes(values):
+    """Convert Pipfile.lock hash lines into InstallRequirement option format.
+
+    The option format uses a str-list mapping. Keys are hash algorithms, and
+    the list contains all values of that algorithm.
+    """
+    hashes = {}
+    if not values:
+        return hashes
+    for value in values:
+        try:
+            name, value = value.split(":", 1)
+        except ValueError:
+            name = "sha256"
+        if name not in hashes:
+            hashes[name] = []
+        hashes[name].append(value)
+    return hashes
+
+
+def build_wheel(ireq, sources, hashes=None):
+    """Build a wheel file for the InstallRequirement object.
+
+    An artifact is downloaded (or read from cache). If the artifact is not a
+    wheel, build one out of it. The dynamically built wheel is ephemeral; do
+    not depend on its existence after the returned wheel goes out of scope.
+
+    If `hashes` is truthy, it is assumed to be a list of hashes (as formatted
+    in Pipfile.lock) to be checked against the download.
+
+    Returns a `distlib.wheel.Wheel` instance. Raises a `RuntimeError` if the
+    wheel cannot be built.
+    """
+    kwargs = _prepare_wheel_building_kwargs(ireq)
+    finder = _get_finder(sources)
+
+    # Not for upgrade, hash not required. Hashes are not required here even
+    # when we provide them, because pip skips local wheel cache if we set it
+    # to True. Hashes are checked later if we need to download the file.
+    ireq.populate_link(finder, False, False)
+
+    # Ensure ireq.source_dir is set.
+    # This is intentionally set to build_dir, not src_dir. Comments from pip:
+    #   [...] if filesystem packages are not marked editable in a req, a non
+    #   deterministic error occurs when the script attempts to unpack the
+    #   build directory.
+    # Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq
+    # is editable, build_dir is actually src_dir, making the build in-place.
+    ireq.ensure_has_source_dir(kwargs["build_dir"])
+
+    # Ensure the remote artifact is downloaded locally. For wheels, it is
+    # enough to just download because we'll use them directly. For an sdist,
+    # we need to unpack so we can build it.
+    if not pip_shims.is_file_url(ireq.link):
+        if ireq.is_wheel:
+            only_download = True
+            download_dir = kwargs["wheel_download_dir"]
+        else:
+            only_download = False
+            download_dir = kwargs["download_dir"]
+        ireq.options["hashes"] = _convert_hashes(hashes)
+        unpack_url(
+            ireq.link, ireq.source_dir, download_dir,
+            only_download=only_download, session=finder.session,
+            hashes=ireq.hashes(False), progress_bar=False,
+        )
+
+    if ireq.is_wheel:
+        # If this is a wheel, use the downloaded thing.
+        output_dir = kwargs["wheel_download_dir"]
+        wheel_path = os.path.join(output_dir, ireq.link.filename)
+    else:
+        # Otherwise we need to build an ephemeral wheel.
+        wheel_path = _build_wheel(
+            ireq, vistir.path.create_tracked_tempdir(prefix="ephem"),
+            finder, _get_wheel_cache(), kwargs,
+        )
+        if wheel_path is None or not os.path.exists(wheel_path):
+            raise RuntimeError("failed to build wheel from {}".format(ireq))
+    return distlib.wheel.Wheel(wheel_path)
+
+
+def _obtrain_ref(vcs_obj, src_dir, name, rev=None):
+    target_dir = os.path.join(src_dir, name)
+    target_rev = vcs_obj.make_rev_options(rev)
+    if not os.path.exists(target_dir):
+        vcs_obj.obtain(target_dir)
+    if (not vcs_obj.is_commit_id_equal(target_dir, rev) and
+            not vcs_obj.is_commit_id_equal(target_dir, target_rev)):
+        vcs_obj.update(target_dir, target_rev)
+    return vcs_obj.get_revision(target_dir)
+
+
+def get_vcs_ref(requirement):
+    backend = VCS_SUPPORT._registry.get(requirement.vcs)
+    vcs = backend(url=requirement.req.vcs_uri)
+    src = _get_src_dir()
+    name = requirement.normalized_name
+    ref = _obtrain_ref(vcs, src, name, rev=requirement.req.ref)
+    return ref
+
+
+def find_installation_candidates(ireq, sources):
+    finder = _get_finder(sources)
+    return finder.find_all_candidates(ireq.name)
+
+
+class RequirementUninstallation(object):
+    """A context manager to remove a package for the inner block.
+
+    This uses `UninstallPathSet` to control the workflow. If the inner block
+    exits correctly, the uninstallation is committed, otherwise rolled back.
+    """
+    def __init__(self, ireq, auto_confirm, verbose):
+        self.ireq = ireq
+        self.pathset = None
+        self.auto_confirm = auto_confirm
+        self.verbose = verbose
+
+    def __enter__(self):
+        self.pathset = self.ireq.uninstall(
+            auto_confirm=self.auto_confirm,
+            verbose=self.verbose,
+        )
+        return self.pathset
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if self.pathset is None:
+            return
+        if exc_type is None:
+            self.pathset.commit()
+        else:
+            self.pathset.rollback()
+
+
+def uninstall_requirement(ireq, **kwargs):
+    return RequirementUninstallation(ireq, **kwargs)
+
+
+@contextlib.contextmanager
+def _suppress_distutils_logs():
+    """Hack to hide noise generated by `setup.py develop`.
+
+    There isn't a good way to suppress them now, so let's monkey-patch.
+    See https://bugs.python.org/issue25392.
+    """
+    f = distutils.log.Log._log
+
+    def _log(log, level, msg, args):
+        if level >= distutils.log.ERROR:
+            f(log, level, msg, args)
+
+    distutils.log.Log._log = _log
+    yield
+    distutils.log.Log._log = f
+
+
+class NoopInstaller(object):
+    """An installer.
+
+    This class is not designed to be instantiated by itself, but used as a
+    common interface for subclassing.
+
+    An installer has two methods, `prepare()` and `install()`. Neither takes
+    arguments, and should be called in that order to prepare an installation
+    operation, and to actually install things.
+    """
+    def prepare(self):
+        pass
+
+    def install(self):
+        pass
+
+
+class EditableInstaller(NoopInstaller):
+    """Installer to handle editable.
+    """
+    def __init__(self, requirement):
+        ireq = requirement.as_ireq()
+        self.working_directory = ireq.setup_py_dir
+        self.setup_py = ireq.setup_py
+
+    def install(self):
+        with vistir.cd(self.working_directory), _suppress_distutils_logs():
+            # Access from Setuptools to ensure things are patched correctly.
+            setuptools.dist.distutils.core.run_setup(
+                self.setup_py, ["develop", "--no-deps"],
+            )
+
+
+class WheelInstaller(NoopInstaller):
+    """Installer by building a wheel.
+
+    The wheel is built during `prepare()`, and installed in `install()`.
+    """
+    def __init__(self, requirement, sources, paths):
+        self.ireq = requirement.as_ireq()
+        self.sources = filter_sources(requirement, sources)
+        self.hashes = requirement.hashes or None
+        self.paths = paths
+        self.wheel = None
+
+    def prepare(self):
+        self.wheel = build_wheel(self.ireq, self.sources, self.hashes)
+
+    def install(self):
+        self.wheel.install(self.paths, distlib.scripts.ScriptMaker(None, None))
diff --git a/pipenv/vendor/passa/_pip_shims.py b/pipenv/vendor/passa/_pip_shims.py
new file mode 100644
index 0000000000..b2c7b6ea31
--- /dev/null
+++ b/pipenv/vendor/passa/_pip_shims.py
@@ -0,0 +1,61 @@
+# -*- coding=utf-8 -*-
+
+"""Shims to make the pip interface more consistent across versions.
+
+There are currently three members:
+
+* VCS_SUPPORT is an instance of VcsSupport.
+* build_wheel abstracts the process to build a wheel out of a bunch of parameters.
+* unpack_url wraps the actual function in pip to accept modern parameters.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import pip_shims
+
+
+def _build_wheel_pre10(ireq, output_dir, finder, wheel_cache, kwargs):
+    kwargs.update({"wheel_cache": wheel_cache, "session": finder.session})
+    reqset = pip_shims.RequirementSet(**kwargs)
+    builder = pip_shims.WheelBuilder(reqset, finder)
+    return builder._build_one(ireq, output_dir)
+
+
+def _build_wheel_modern(ireq, output_dir, finder, wheel_cache, kwargs):
+    """Build a wheel.
+
+    * ireq: The InstallRequirement object to build
+    * output_dir: The directory to build the wheel in.
+    * finder: pip's internal Finder object to find the source out of ireq.
+    * kwargs: Various keyword arguments from `_prepare_wheel_building_kwargs`.
+    """
+    kwargs.update({"progress_bar": "off", "build_isolation": False})
+    with pip_shims.RequirementTracker() as req_tracker:
+        if req_tracker:
+            kwargs["req_tracker"] = req_tracker
+        preparer = pip_shims.RequirementPreparer(**kwargs)
+        builder = pip_shims.WheelBuilder(finder, preparer, wheel_cache)
+        return builder._build_one(ireq, output_dir)
+
+
+def _unpack_url_pre10(*args, **kwargs):
+    """Shim for unpack_url in various pip versions.
+
+    pip before 10.0 does not accept `progress_bar` here. Simply drop it.
+    """
+    kwargs.pop("progress_bar", None)
+    return pip_shims.unpack_url(*args, **kwargs)
+
+
+PIP_VERSION = pip_shims.utils._parse(pip_shims.pip_version)
+VERSION_10 = pip_shims.utils._parse("10")
+
+
+VCS_SUPPORT = pip_shims.VcsSupport()
+
+build_wheel = _build_wheel_modern
+unpack_url = pip_shims.unpack_url
+
+if PIP_VERSION < VERSION_10:
+    build_wheel = _build_wheel_pre10
+    unpack_url = _unpack_url_pre10
diff --git a/pipenv/vendor/passa/caches.py b/pipenv/vendor/passa/caches.py
new file mode 100644
index 0000000000..6d3131fa53
--- /dev/null
+++ b/pipenv/vendor/passa/caches.py
@@ -0,0 +1,214 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import copy
+import hashlib
+import json
+import os
+import sys
+
+import appdirs
+import pip_shims
+import requests
+import vistir
+
+from ._pip_shims import VCS_SUPPORT
+from .utils import get_pinned_version
+
+
+CACHE_DIR = os.environ.get("PASSA_CACHE_DIR", appdirs.user_cache_dir("passa"))
+
+
+class HashCache(pip_shims.SafeFileCache):
+    """Caches hashes of PyPI artifacts so we do not need to re-download them.
+
+    Hashes are only cached when the URL appears to contain a hash in it and
+    the cache key includes the hash value returned from the server. This
+    ought to avoid issues where the location on the server changes.
+    """
+    def __init__(self, *args, **kwargs):
+        session = kwargs.pop('session', requests.session())
+        self.session = session
+        kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
+        super(HashCache, self).__init__(*args, **kwargs)
+
+    def get_hash(self, location):
+        # If there is no location hash (i.e., md5, sha256, etc.), we don't want
+        # to store it.
+        hash_value = None
+        orig_scheme = location.scheme
+        new_location = copy.deepcopy(location)
+        if orig_scheme in VCS_SUPPORT.all_schemes:
+            new_location.url = new_location.url.split("+", 1)[-1]
+        can_hash = new_location.hash
+        if can_hash:
+            # hash url WITH fragment
+            hash_value = self.get(new_location.url)
+        if not hash_value:
+            hash_value = self._get_file_hash(new_location)
+            hash_value = hash_value.encode('utf8')
+            if can_hash:
+                self.set(new_location.url, hash_value)
+        return hash_value.decode('utf8')
+
+    def _get_file_hash(self, location):
+        h = hashlib.new(pip_shims.FAVORITE_HASH)
+        with vistir.open_file(location, self.session) as fp:
+            for chunk in iter(lambda: fp.read(8096), b""):
+                h.update(chunk)
+        return ":".join([h.name, h.hexdigest()])
+
+
+# pip-tools's dependency cache implementation.
+class CorruptCacheError(Exception):
+    def __init__(self, path):
+        self.path = path
+
+    def __str__(self):
+        lines = [
+            'The dependency cache seems to have been corrupted.',
+            'Inspect, or delete, the following file:',
+            '  {}'.format(self.path),
+        ]
+        return os.linesep.join(lines)
+
+
+def _key_from_req(req):
+    """Get an all-lowercase version of the requirement's name."""
+    if hasattr(req, 'key'):
+        # from pkg_resources, such as installed dists for pip-sync
+        key = req.key
+    else:
+        # from packaging, such as install requirements from requirements.txt
+        key = req.name
+
+    key = key.replace('_', '-').lower()
+    return key
+
+
+def _read_cache_file(cache_file_path):
+    with open(cache_file_path, 'r') as cache_file:
+        try:
+            doc = json.load(cache_file)
+        except ValueError:
+            raise CorruptCacheError(cache_file_path)
+
+        # Check version and load the contents
+        assert doc['__format__'] == 1, 'Unknown cache file format'
+        return doc['dependencies']
+
+
+class _JSONCache(object):
+    """A persistent cache backed by a JSON file.
+
+    The cache file is written to the appropriate user cache dir for the
+    current platform, i.e.
+
+        ~/.cache/pip-tools/depcache-pyX.Y.json
+
+    Where X.Y indicates the Python version.
+    """
+    filename_format = None
+
+    def __init__(self, cache_dir=CACHE_DIR):
+        vistir.mkdir_p(cache_dir)
+        python_version = ".".join(str(digit) for digit in sys.version_info[:2])
+        cache_filename = self.filename_format.format(
+            python_version=python_version,
+        )
+        self._cache_file = os.path.join(cache_dir, cache_filename)
+        self._cache = None
+
+    @property
+    def cache(self):
+        """The dictionary that is the actual in-memory cache.
+
+        This property lazily loads the cache from disk.
+        """
+        if self._cache is None:
+            self.read_cache()
+        return self._cache
+
+    def as_cache_key(self, ireq):
+        """Given a requirement, return its cache key.
+
+        This behavior is a little weird in order to allow backwards
+        compatibility with cache files. For a requirement without extras, this
+        will return, for example::
+
+            ("ipython", "2.1.0")
+
+        For a requirement with extras, the extras will be comma-separated and
+        appended to the version, inside brackets, like so::
+
+            ("ipython", "2.1.0[nbconvert,notebook]")
+        """
+        extras = tuple(sorted(ireq.extras))
+        if not extras:
+            extras_string = ""
+        else:
+            extras_string = "[{}]".format(",".join(extras))
+        name = _key_from_req(ireq.req)
+        version = get_pinned_version(ireq)
+        return name, "{}{}".format(version, extras_string)
+
+    def read_cache(self):
+        """Reads the cached contents into memory.
+        """
+        if os.path.exists(self._cache_file):
+            self._cache = _read_cache_file(self._cache_file)
+        else:
+            self._cache = {}
+
+    def write_cache(self):
+        """Writes the cache to disk as JSON.
+        """
+        doc = {
+            '__format__': 1,
+            'dependencies': self._cache,
+        }
+        with open(self._cache_file, 'w') as f:
+            json.dump(doc, f, sort_keys=True)
+
+    def clear(self):
+        self._cache = {}
+        self.write_cache()
+
+    def __contains__(self, ireq):
+        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+        return pkgversion_and_extras in self.cache.get(pkgname, {})
+
+    def __getitem__(self, ireq):
+        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+        return self.cache[pkgname][pkgversion_and_extras]
+
+    def __setitem__(self, ireq, values):
+        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+        self.cache.setdefault(pkgname, {})
+        self.cache[pkgname][pkgversion_and_extras] = values
+        self.write_cache()
+
+    def __delitem__(self, ireq):
+        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+        try:
+            del self.cache[pkgname][pkgversion_and_extras]
+        except KeyError:
+            return
+        self.write_cache()
+
+    def get(self, ireq, default=None):
+        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
+        return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default)
+
+
+class DependencyCache(_JSONCache):
+    """Cache the dependencies of candidates.
+    """
+    filename_format = "depcache-py{python_version}.json"
+
+
+class RequiresPythonCache(_JSONCache):
+    """Cache a candidate's Requires-Python information.
+    """
+    filename_format = "pyreqcache-py{python_version}.json"
diff --git a/pipenv/vendor/passa/candidates.py b/pipenv/vendor/passa/candidates.py
new file mode 100644
index 0000000000..d5390d65ee
--- /dev/null
+++ b/pipenv/vendor/passa/candidates.py
@@ -0,0 +1,81 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import os
+import sys
+
+import packaging.specifiers
+import packaging.version
+import requirementslib
+
+from ._pip import find_installation_candidates, get_vcs_ref
+
+
+def _filter_matching_python_requirement(candidates, python_version):
+    for c in candidates:
+        try:
+            requires_python = c.requires_python
+        except AttributeError:
+            requires_python = c.location.requires_python
+        if python_version and requires_python:
+            # Old specifications had people setting this to single digits
+            # which is effectively the same as '>=digit,<digit+1'
+        if level >= distutils.log.ERROR:
+            f(log, level, msg, args)
+
+    distutils.log.Log._log = _log
+    yield
+    distutils.log.Log._log = f
+
+
+class NoopInstaller(object):
+    """An installer.
+
+    This class is not designed to be instantiated by itself, but used as a
+    common interface for subclassing.
+
+    An installer has two methods, `prepare()` and `install()`. Neither takes
+    arguments, and should be called in that order to prepare an installation
+    operation, and to actually install things.
+    """
+    def prepare(self):
+        pass
+
+    def install(self):
+        pass
+
+
+class EditableInstaller(NoopInstaller):
+    """Installer to handle editable.
+    """
+    def __init__(self, requirement):
+        ireq = requirement.as_ireq()
+        self.working_directory = ireq.setup_py_dir
+        self.setup_py = ireq.setup_py
+
+    def install(self):
+        with vistir.cd(self.working_directory), _suppress_distutils_logs():
+            # Access from Setuptools to ensure things are patched correctly.
+            setuptools.dist.distutils.core.run_setup(
+                self.setup_py, ["develop", "--no-deps"],
+            )
+
+
+class WheelInstaller(NoopInstaller):
+    """Installer by building a wheel.
+
+    The wheel is built during `prepare()`, and installed in `install()`.
+    """
+    def __init__(self, requirement, sources, paths):
+        self.ireq = requirement.as_ireq()
+        self.sources = filter_sources(requirement, sources)
+        self.hashes = requirement.hashes or None
+        self.paths = paths
+        self.wheel = None
+
+    def prepare(self):
+        self.wheel = build_wheel(self.ireq, self.sources, self.hashes)
+
+    def install(self):
+        self.wheel.install(self.paths, distlib.scripts.ScriptMaker(None, None))
diff --git a/pipenv/vendor/passa/internals/_pip_shims.py b/pipenv/vendor/passa/internals/_pip_shims.py
new file mode 100644
index 0000000000..b2c7b6ea31
--- /dev/null
+++ b/pipenv/vendor/passa/internals/_pip_shims.py
@@ -0,0 +1,61 @@
+# -*- coding=utf-8 -*-
+
+"""Shims to make the pip interface more consistent across versions.
+
+There are currently three members:
+
+* VCS_SUPPORT is an instance of VcsSupport.
+* build_wheel abstracts the process to build a wheel out of a bunch of parameters.
+* unpack_url wraps the actual function in pip to accept modern parameters.
+"""
+
+from __future__ import absolute_import, unicode_literals
+
+import pip_shims
+
+
+def _build_wheel_pre10(ireq, output_dir, finder, wheel_cache, kwargs):
+    kwargs.update({"wheel_cache": wheel_cache, "session": finder.session})
+    reqset = pip_shims.RequirementSet(**kwargs)
+    builder = pip_shims.WheelBuilder(reqset, finder)
+    return builder._build_one(ireq, output_dir)
+
+
+def _build_wheel_modern(ireq, output_dir, finder, wheel_cache, kwargs):
+    """Build a wheel.
+
+    * ireq: The InstallRequirement object to build
+    * output_dir: The directory to build the wheel in.
+    * finder: pip's internal Finder object to find the source out of ireq.
+    * kwargs: Various keyword arguments from `_prepare_wheel_building_kwargs`.
+    """
+    kwargs.update({"progress_bar": "off", "build_isolation": False})
+    with pip_shims.RequirementTracker() as req_tracker:
+        if req_tracker:
+            kwargs["req_tracker"] = req_tracker
+        preparer = pip_shims.RequirementPreparer(**kwargs)
+        builder = pip_shims.WheelBuilder(finder, preparer, wheel_cache)
+        return builder._build_one(ireq, output_dir)
+
+
+def _unpack_url_pre10(*args, **kwargs):
+    """Shim for unpack_url in various pip versions.
+
+    pip before 10.0 does not accept `progress_bar` here. Simply drop it.
+    """
+    kwargs.pop("progress_bar", None)
+    return pip_shims.unpack_url(*args, **kwargs)
+
+
+PIP_VERSION = pip_shims.utils._parse(pip_shims.pip_version)
+VERSION_10 = pip_shims.utils._parse("10")
+
+
+VCS_SUPPORT = pip_shims.VcsSupport()
+
+build_wheel = _build_wheel_modern
+unpack_url = pip_shims.unpack_url
+
+if PIP_VERSION < VERSION_10:
+    build_wheel = _build_wheel_pre10
+    unpack_url = _unpack_url_pre10
diff --git a/pipenv/vendor/passa/internals/caches.py b/pipenv/vendor/passa/internals/caches.py
new file mode 100644
index 0000000000..6d3131fa53
--- /dev/null
+++ b/pipenv/vendor/passa/internals/caches.py
@@ -0,0 +1,214 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import copy
+import hashlib
+import json
+import os
+import sys
+
+import appdirs
+import pip_shims
+import requests
+import vistir
+
+from ._pip_shims import VCS_SUPPORT
+from .utils import get_pinned_version
+
+
+CACHE_DIR = os.environ.get("PASSA_CACHE_DIR", appdirs.user_cache_dir("passa"))
+
+
+class HashCache(pip_shims.SafeFileCache):
+    """Caches hashes of PyPI artifacts so we do not need to re-download them.
+
+    Hashes are only cached when the URL appears to contain a hash in it, and
+    the cache key includes the hash value returned from the server. This
+    ought to avoid issues where the location on the server changes.
+    """
+    def __init__(self, *args, **kwargs):
+        session = kwargs.pop('session', requests.session())
+        self.session = session
+        kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
+        super(HashCache, self).__init__(*args, **kwargs)
+
+    def get_hash(self, location):
+        # If there is no location hash (i.e., md5, sha256, etc.), we don't want
+        # to store it.
+        hash_value = None
+        orig_scheme = location.scheme
+        new_location = copy.deepcopy(location)
+        if orig_scheme in VCS_SUPPORT.all_schemes:
+            new_location.url = new_location.url.split("+", 1)[-1]
+        can_hash = new_location.hash
+        if can_hash:
+            # hash url WITH fragment
+            hash_value = self.get(new_location.url)
+        if not hash_value:
+            hash_value = self._get_file_hash(new_location)
+            hash_value = hash_value.encode('utf8')
+        if can_hash:
+            self.set(new_location.url, hash_value)
+        return hash_value.decode('utf8')
+
+    def _get_file_hash(self, location):
+        h = hashlib.new(pip_shims.FAVORITE_HASH)
+        with vistir.open_file(location, self.session) as fp:
+            for chunk in iter(lambda: fp.read(8096), b""):
+                h.update(chunk)
+        return ":".join([h.name, h.hexdigest()])
+
+
+# pip-tools's dependency cache implementation.
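`HashCache.get_hash` above falls back to `_get_file_hash`, which streams the artifact through `hashlib`. The same scheme in isolation, assuming `sha256` stands in for `pip_shims.FAVORITE_HASH` and a local file path stands in for pip's link object:

    import hashlib

    def file_hash(path, algorithm="sha256", chunk_size=8096):
        # Stream the file so large artifacts need not fit in memory.
        h = hashlib.new(algorithm)
        with open(path, "rb") as fp:
            for chunk in iter(lambda: fp.read(chunk_size), b""):
                h.update(chunk)
        return ":".join([h.name, h.hexdigest()])  # e.g. "sha256:9f86d08..."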
+class CorruptCacheError(Exception): + def __init__(self, path): + self.path = path + + def __str__(self): + lines = [ + 'The dependency cache seems to have been corrupted.', + 'Inspect, or delete, the following file:', + ' {}'.format(self.path), + ] + return os.linesep.join(lines) + + +def _key_from_req(req): + """Get an all-lowercase version of the requirement's name.""" + if hasattr(req, 'key'): + # from pkg_resources, such as installed dists for pip-sync + key = req.key + else: + # from packaging, such as install requirements from requirements.txt + key = req.name + + key = key.replace('_', '-').lower() + return key + + +def _read_cache_file(cache_file_path): + with open(cache_file_path, 'r') as cache_file: + try: + doc = json.load(cache_file) + except ValueError: + raise CorruptCacheError(cache_file_path) + + # Check version and load the contents + assert doc['__format__'] == 1, 'Unknown cache file format' + return doc['dependencies'] + + +class _JSONCache(object): + """A persistent cache backed by a JSON file. + + The cache file is written to the appropriate user cache dir for the + current platform, i.e. + + ~/.cache/pip-tools/depcache-pyX.Y.json + + Where X.Y indicates the Python version. + """ + filename_format = None + + def __init__(self, cache_dir=CACHE_DIR): + vistir.mkdir_p(cache_dir) + python_version = ".".join(str(digit) for digit in sys.version_info[:2]) + cache_filename = self.filename_format.format( + python_version=python_version, + ) + self._cache_file = os.path.join(cache_dir, cache_filename) + self._cache = None + + @property + def cache(self): + """The dictionary that is the actual in-memory cache. + + This property lazily loads the cache from disk. + """ + if self._cache is None: + self.read_cache() + return self._cache + + def as_cache_key(self, ireq): + """Given a requirement, return its cache key. + + This behavior is a little weird in order to allow backwards + compatibility with cache files. For a requirement without extras, this + will return, for example:: + + ("ipython", "2.1.0") + + For a requirement with extras, the extras will be comma-separated and + appended to the version, inside brackets, like so:: + + ("ipython", "2.1.0[nbconvert,notebook]") + """ + extras = tuple(sorted(ireq.extras)) + if not extras: + extras_string = "" + else: + extras_string = "[{}]".format(",".join(extras)) + name = _key_from_req(ireq.req) + version = get_pinned_version(ireq) + return name, "{}{}".format(version, extras_string) + + def read_cache(self): + """Reads the cached contents into memory. + """ + if os.path.exists(self._cache_file): + self._cache = _read_cache_file(self._cache_file) + else: + self._cache = {} + + def write_cache(self): + """Writes the cache to disk as JSON. 
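`write_cache` serializes a two-key JSON document, and `_read_cache_file` above validates its `__format__` marker. A minimal round-trip with invented cache contents:

    import json

    doc = {
        "__format__": 1,  # version checked by _read_cache_file
        "dependencies": {"ipython": {"2.1.0": ["decorator"]}},
    }
    text = json.dumps(doc, sort_keys=True)
    assert json.loads(text)["__format__"] == 1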
+ """ + doc = { + '__format__': 1, + 'dependencies': self._cache, + } + with open(self._cache_file, 'w') as f: + json.dump(doc, f, sort_keys=True) + + def clear(self): + self._cache = {} + self.write_cache() + + def __contains__(self, ireq): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + return pkgversion_and_extras in self.cache.get(pkgname, {}) + + def __getitem__(self, ireq): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + return self.cache[pkgname][pkgversion_and_extras] + + def __setitem__(self, ireq, values): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + self.cache.setdefault(pkgname, {}) + self.cache[pkgname][pkgversion_and_extras] = values + self.write_cache() + + def __delitem__(self, ireq): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + try: + del self.cache[pkgname][pkgversion_and_extras] + except KeyError: + return + self.write_cache() + + def get(self, ireq, default=None): + pkgname, pkgversion_and_extras = self.as_cache_key(ireq) + return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default) + + +class DependencyCache(_JSONCache): + """Cache the dependency of cancidates. + """ + filename_format = "depcache-py{python_version}.json" + + +class RequiresPythonCache(_JSONCache): + """Cache a candidate's Requires-Python information. + """ + filename_format = "pyreqcache-py{python_version}.json" diff --git a/pipenv/vendor/passa/internals/candidates.py b/pipenv/vendor/passa/internals/candidates.py new file mode 100644 index 0000000000..d5390d65ee --- /dev/null +++ b/pipenv/vendor/passa/internals/candidates.py @@ -0,0 +1,81 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import os +import sys + +import packaging.specifiers +import packaging.version +import requirementslib + +from ._pip import find_installation_candidates, get_vcs_ref + + +def _filter_matching_python_requirement(candidates, python_version): + for c in candidates: + try: + requires_python = c.requires_python + except AttributeError: + requires_python = c.location.requires_python + if python_version and requires_python: + # Old specifications had people setting this to single digits + # which is effectively the same as '>=digit, 1} + if identifier not in identifiers and not (identifiers & routes): + continue + name = requirement.normalized_name + if requirement.extras: + # Aggregate extras from multiple routes so we can produce their + # union in the lock file. (sarugaku/passa#24) + try: + extras[name].extend(requirement.extras) + except KeyError: + extras[name] = list(requirement.extras) + entries[name] = next(iter(requirement.as_pipfile().values())) + for name, ext in extras.items(): + entries[name]["extras"] = ext + + return entries + + +class AbstractLocker(object): + """Helper class to produce a new lock file for a project. + + This is not intended for instantiation. You should use one of its concrete + subclasses instead. The class contains logic to: + + * Prepare a project for locking + * Perform the actually resolver invocation + * Convert resolver output into lock file format + * Update the project to have the new lock file + """ + def __init__(self, project): + self.project = project + self.default_requirements = _get_requirements( + project.pipfile, "packages", + ) + self.develop_requirements = _get_requirements( + project.pipfile, "dev-packages", + ) + + # This comprehension dance ensures we merge packages from both + # sections, and definitions in the default section win. 
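The comprehension that follows relies on a plain dict-construction rule worth spelling out: when keys collide, later items win. In miniature, with invented pins:

    import itertools

    develop = {"pytest": "dev-pin", "requests": "dev-pin"}
    default = {"requests": "default-pin"}
    # develop first, default second: default entries win on key clashes.
    merged = {k: v for k, v in itertools.chain(develop.items(), default.items())}
    assert merged == {"pytest": "dev-pin", "requests": "default-pin"}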
+ self.requirements = {k: r for k, r in itertools.chain( + self.develop_requirements.items(), + self.default_requirements.items(), + )}.values() + + self.sources = [s._data.copy() for s in project.pipfile.sources] + self.allow_prereleases = bool( + project.pipfile.get("pipenv", {}).get("allow_prereleases", False), + ) + + def __repr__(self): + return "<{0} @ {1!r}>".format(type(self).__name__, self.project.root) + + def get_provider(self): + raise NotImplementedError + + def lock(self): + """Lock specified (abstract) requirements into (concrete) candidates. + + The locking procedure consists of four stages: + + * Resolve versions and dependency graph (powered by ResolveLib). + * Walk the graph to determine "why" each candidate came to be, i.e. + what top-level requirements result in a given candidate. + * Populate hashes for resolved candidates. + * Populate markers based on dependency specifications of each + candidate, and the dependency graph. + """ + reporters.report("lock-starting", {"requirements": self.requirements}) + + provider = self.get_provider() + resolver = resolvelib.Resolver( + provider, reporters.get_reporter().build_for_resolvelib(), + ) + + with vistir.cd(self.project.root): + state = resolver.resolve(self.requirements) + + traces = trace_graph(state.graph) + reporters.report("lock-trace-ended", { + "state": state, "traces": traces, + }) + + hash_cache = HashCache() + for r in state.mapping.values(): + if not r.hashes: + r.hashes = get_hashes(hash_cache, r) + + set_metadata( + state.mapping, traces, + provider.fetched_dependencies, provider.requires_pythons, + ) + + lockfile = plette.Lockfile.with_meta_from(self.project.pipfile) + lockfile["default"] = _collect_derived_entries( + state, traces, self.default_requirements, + ) + lockfile["develop"] = _collect_derived_entries( + state, traces, self.develop_requirements, + ) + self.project.lockfile = lockfile + + +class BasicLocker(AbstractLocker): + """Basic concrete locker. + + This takes a project, generates a lock file from its Pipfile, and sets + the lock file property to the project. + """ + def get_provider(self): + return BasicProvider( + self.requirements, self.sources, self.allow_prereleases, + ) + + +class PinReuseLocker(AbstractLocker): + """A specialized locker to handle re-locking based on existing pins. + + See :class:`.providers.PinReuseProvider` for more information. + """ + def __init__(self, project): + super(PinReuseLocker, self).__init__(project) + pins = _get_requirements(project.lockfile, "develop") + pins.update(_get_requirements(project.lockfile, "default")) + for pin in pins.values(): + pin.markers = None + self.preferred_pins = pins + + def get_provider(self): + return PinReuseProvider( + self.preferred_pins, + self.requirements, self.sources, self.allow_prereleases, + ) + + +class EagerUpgradeLocker(PinReuseLocker): + """A specialized locker to handle the "eager" upgrade strategy. + + See :class:`.providers.EagerUpgradeProvider` for more + information. 
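For readers unfamiliar with the ResolveLib interface that `lock()` above drives (and that `providers.py` below implements), here is a self-contained toy resolution. It assumes the 2018-era resolvelib API this patch targets; the package index and dependency data are invented:

    import resolvelib

    class ToyProvider(resolvelib.AbstractProvider):
        index = {"a": ["1.0", "2.0"], "b": ["1.0"]}  # name -> versions
        deps = {("a", "2.0"): [("b", None)]}         # candidate -> requirements

        def identify(self, dependency):
            return dependency[0]

        def get_preference(self, resolution, candidates, information):
            return len(candidates)  # fewer choices first, as in BasicProvider

        def find_matches(self, requirement):
            name, pin = requirement
            return [(name, v) for v in self.index[name] if pin in (None, v)]

        def is_satisfied_by(self, requirement, candidate):
            return requirement[1] in (None, candidate[1])

        def get_dependencies(self, candidate):
            return self.deps.get(candidate, [])

    resolver = resolvelib.Resolver(ToyProvider(), resolvelib.BaseReporter())
    state = resolver.resolve([("a", None)])
    print(state.mapping)  # e.g. {'a': ('a', '2.0'), 'b': ('b', '1.0')}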
+    """
+    def __init__(self, tracked_names, *args, **kwargs):
+        super(EagerUpgradeLocker, self).__init__(*args, **kwargs)
+        self.tracked_names = tracked_names
+
+    def get_provider(self):
+        return EagerUpgradeProvider(
+            self.tracked_names, self.preferred_pins,
+            self.requirements, self.sources, self.allow_prereleases,
+        )
diff --git a/pipenv/vendor/passa/internals/markers.py b/pipenv/vendor/passa/internals/markers.py
new file mode 100644
index 0000000000..95efab95d6
--- /dev/null
+++ b/pipenv/vendor/passa/internals/markers.py
@@ -0,0 +1,101 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+from packaging.markers import Marker
+
+
+def _strip_extra(elements):
+    """Remove the "extra == ..." operands from the list.
+
+    This is not a comprehensive implementation, but relies on an important
+    characteristic of metadata generation: The "extra == ..." operand is
+    always associated with an "and" operator. This means that we can simply
+    remove the operand and the "and" operator associated with it.
+    """
+    extra_indexes = []
+    for i, element in enumerate(elements):
+        if isinstance(element, list):
+            cancelled = _strip_extra(element)
+            if cancelled:
+                extra_indexes.append(i)
+        elif isinstance(element, tuple) and element[0].value == "extra":
+            extra_indexes.append(i)
+    for i in reversed(extra_indexes):
+        del elements[i]
+        if i > 0 and elements[i - 1] == "and":
+            # Remove the "and" before it.
+            del elements[i - 1]
+        elif elements:
+            # This shouldn't ever happen, but is included for completeness.
+            # If there is not an "and" before this element, try to remove the
+            # operator after it.
+            del elements[0]
+    return (not elements)
+
+
+def get_without_extra(marker):
+    """Build a new marker without the `extra == ...` part.
+
+    The implementation relies deeply on packaging's internals, but I don't
+    have a better way now (except implementing the whole thing myself).
+
+    This could return `None` if the `extra == ...` part is the only one in the
+    input marker.
+    """
+    # TODO: Why is this very deep in the internals? Why is a better solution
+    # implementing it yourself when someone is already maintaining a codebase
+    # for this? It's literally a grammar implementation that is required to
+    # meet the demands of a pep... -d
+    if not marker:
+        return None
+    marker = Marker(str(marker))
+    elements = marker._markers
+    _strip_extra(elements)
+    if elements:
+        return marker
+    return None
+
+
+def _markers_collect_extras(markers, collection):
+    # Optimization: the marker element is usually appended at the end.
+    for el in reversed(markers):
+        if (isinstance(el, tuple) and
+                el[0].value == "extra" and
+                el[1].value == "=="):
+            collection.add(el[2].value)
+        elif isinstance(el, list):
+            _markers_collect_extras(el, collection)
+
+
+def get_contained_extras(marker):
+    """Collect "extra == ..." operands from a marker.
+
+    Returns a set of str. Each str is a specified extra in this marker.
+    """
+    if not marker:
+        return set()
+    marker = Marker(str(marker))
+    extras = set()
+    _markers_collect_extras(marker._markers, extras)
+    return extras
+
+
+def _markers_contains_extra(markers):
+    # Optimization: the marker element is usually appended at the end.
+    for element in reversed(markers):
+        if isinstance(element, tuple) and element[0].value == "extra":
+            return True
+        elif isinstance(element, list):
+            if _markers_contains_extra(element):
+                return True
+    return False
+
+
+def contains_extra(marker):
+    """Check whether a marker contains an "extra == ..." operand.
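The traversals in `_strip_extra` and `_markers_collect_extras` above depend on the shape of packaging's private `_markers` list, which is worth seeing once (the node reprs vary slightly across packaging versions):

    from packaging.markers import Marker

    m = Marker('os_name == "nt" and extra == "socks"')
    print(m._markers)
    # Roughly: [(<Variable('os_name')>, <Op('==')>, <Value('nt')>),
    #           'and',
    #           (<Variable('extra')>, <Op('==')>, <Value('socks')>)]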
+ """ + if not marker: + return False + marker = Marker(str(marker)) + return _markers_contains_extra(marker._markers) diff --git a/pipenv/vendor/passa/internals/metadata.py b/pipenv/vendor/passa/internals/metadata.py new file mode 100644 index 0000000000..9709c5355f --- /dev/null +++ b/pipenv/vendor/passa/internals/metadata.py @@ -0,0 +1,169 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import copy +import itertools + +import packaging.markers +import packaging.specifiers +import vistir +import vistir.misc + +from .markers import get_without_extra +from .specifiers import cleanup_pyspecs, pyspec_from_markers + + +def dedup_markers(s): + # TODO: Implement better logic. + deduped = sorted(vistir.misc.dedup(s)) + return deduped + + +class MetaSet(object): + """Representation of a "metadata set". + + This holds multiple metadata representaions. Each metadata representation + includes a marker, and a specifier set of Python versions required. + """ + def __init__(self): + self.markerset = frozenset() + self.pyspecset = packaging.specifiers.SpecifierSet() + + def __repr__(self): + return "MetaSet(markerset={0!r}, pyspecset={1!r})".format( + ",".join(sorted(self.markerset)), str(self.pyspecset), + ) + + def __str__(self): + pyspecs = set() + markerset = set() + for m in self.markerset: + marker_specs = pyspec_from_markers(packaging.markers.Marker(m)) + if marker_specs: + pyspecs.add(marker_specs) + else: + markerset.add(m) + if pyspecs: + self.pyspecset._specs &= pyspecs + self.markerset = frozenset(markerset) + return " and ".join(dedup_markers(itertools.chain( + # Make sure to always use the same quotes so we can dedup properly. + ( + "{0}".format(ms) if " or " in ms else ms + for ms in (str(m).replace('"', "'") for m in self.markerset) + ), + ( + "python_version {0[0]} '{0[1]}'".format(spec) + for spec in cleanup_pyspecs(self.pyspecset) + ), + ))) + + def __bool__(self): + return bool(self.markerset or self.pyspecset) + + def __nonzero__(self): # Python 2. + return self.__bool__() + + def __or__(self, pair): + marker, specset = pair + markerset = set(self.markerset) + if marker: + marker_specs = pyspec_from_markers(marker) + if not marker_specs: + markerset.add(str(marker)) + else: + specset._specs &= marker_specs + metaset = MetaSet() + metaset.markerset = frozenset(markerset) + # TODO: Implement some logic to clean up dups like '3.0.*' and '3.0'. + metaset.pyspecset &= self.pyspecset & specset + return metaset + + +def _build_metasets(dependencies, pythons, key, trace, all_metasets): + all_parent_metasets = [] + for route in trace: + parent = route[-1] + try: + parent_metasets = all_metasets[parent] + except KeyError: # Parent not calculated yet. Wait for it. + return + all_parent_metasets.append((parent, parent_metasets)) + + metaset_iters = [] + for parent, parent_metasets in all_parent_metasets: + r = dependencies[parent][key] + python = pythons[key] + metaset = ( + get_without_extra(r.markers), + packaging.specifiers.SpecifierSet(python), + ) + metaset_iters.append( + parent_metaset | metaset + for parent_metaset in parent_metasets + ) + return list(itertools.chain.from_iterable(metaset_iters)) + + +def _calculate_metasets_mapping(dependencies, pythons, traces): + all_metasets = {None: [MetaSet()]} + + del traces[None] + while traces: + new_metasets = {} + for key, trace in traces.items(): + assert key not in all_metasets, key # Sanity check for debug. 
+ metasets = _build_metasets( + dependencies, pythons, key, trace, all_metasets, + ) + if metasets is None: + continue + new_metasets[key] = metasets + if not new_metasets: + break # No progress? Deadlocked. Give up. + all_metasets.update(new_metasets) + for key in new_metasets: + del traces[key] + + return all_metasets + + +def _format_metasets(metasets): + # If there is an unconditional route, this needs to be unconditional. + if not metasets or not all(metasets): + return None + + # This extra str(Marker()) call helps simplify the expression. + return str(packaging.markers.Marker(" or ".join( + "{0}".format(s) if " and " in s else s + for s in dedup_markers(str(metaset) for metaset in metasets + if metaset) + ))) + + +def set_metadata(candidates, traces, dependencies, pythons): + """Add "metadata" to candidates based on the dependency tree. + + Metadata for a candidate includes markers and a specifier for Python + version requirements. + + :param candidates: A key-candidate mapping. Candidates in the mapping will + have their markers set. + :param traces: A graph trace (produced by `traces.trace_graph`) providing + information about dependency relationships between candidates. + :param dependencies: A key-collection mapping containing what dependencies + each candidate in `candidates` requested. + :param pythons: A key-str mapping containing Requires-Python information + of each candidate. + + Keys in mappings and entries in the trace are identifiers of a package, as + implemented by the `identify` method of the resolver's provider. + + The candidates are modified in-place. + """ + metasets_mapping = _calculate_metasets_mapping( + dependencies, pythons, copy.deepcopy(traces), + ) + for key, candidate in candidates.items(): + candidate.markers = _format_metasets(metasets_mapping[key]) diff --git a/pipenv/vendor/passa/internals/providers.py b/pipenv/vendor/passa/internals/providers.py new file mode 100644 index 0000000000..7341839265 --- /dev/null +++ b/pipenv/vendor/passa/internals/providers.py @@ -0,0 +1,184 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import os + +import resolvelib + +from .candidates import find_candidates +from .dependencies import get_dependencies +from .utils import filter_sources, identify_requirment, strip_extras + + +PROTECTED_PACKAGE_NAMES = {"pip", "setuptools"} + + +class BasicProvider(resolvelib.AbstractProvider): + """Provider implementation to interface with `requirementslib.Requirement`. + """ + def __init__(self, root_requirements, sources, allow_prereleases): + self.sources = sources + self.allow_prereleases = bool(allow_prereleases) + self.invalid_candidates = set() + + # Remember requirements of each pinned candidate. The resolver calls + # `get_dependencies()` only when it wants to repin, so the last time + # the dependencies we got when it is last called on a package, are + # the set used by the resolver. We use this later to trace how a given + # dependency is specified by a package. + self.fetched_dependencies = {None: { + self.identify(r): r for r in root_requirements + }} + # TODO: Find a way to resolve with multiple versions (by tricking + # runtime) Include multiple keys in pipfiles? + self.requires_pythons = {None: ""} # TODO: Don't use any value + + def identify(self, dependency): + return identify_requirment(dependency) + + def get_preference(self, resolution, candidates, information): + # TODO: Provide better sorting logic. 
This simply resolves the ones with
+        # fewer choices first. Not sophisticated, but sounds reasonable?
+        return len(candidates)
+
+    def find_matches(self, requirement):
+        # TODO: Implement per-package prereleases flag. (pypa/pipenv#1696)
+        allow_prereleases = self.allow_prereleases
+        sources = filter_sources(requirement, self.sources)
+        candidates = find_candidates(requirement, sources, allow_prereleases)
+        return candidates
+
+    def is_satisfied_by(self, requirement, candidate):
+        # A non-named requirement has exactly one candidate, as implemented in
+        # `find_matches()`. It must match.
+        if not requirement.is_named:
+            return True
+
+        # Optimization: Everything matches if there are no specifiers.
+        if not requirement.specifiers:
+            return True
+
+        # We can't handle old version strings before PEP 440. Drop them all.
+        # Practically this shouldn't be a problem if the user is specifying a
+        # remotely reasonable dependency not from before 2013.
+        candidate_line = candidate.as_line()
+        if candidate_line in self.invalid_candidates:
+            return False
+        try:
+            version = candidate.get_specifier().version
+        except ValueError:
+            print('ignoring invalid version {}'.format(candidate_line))
+            self.invalid_candidates.add(candidate_line)
+            return False
+
+        return requirement.as_ireq().specifier.contains(version)
+
+    def get_dependencies(self, candidate):
+        sources = filter_sources(candidate, self.sources)
+        try:
+            dependencies, requires_python = get_dependencies(
+                candidate, sources=sources,
+            )
+        except Exception as e:
+            if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
+                raise
+            print("failed to get dependencies for {0!r}: {1}".format(
+                candidate.as_line(include_hashes=False), e,
+            ))
+            dependencies = []
+            requires_python = ""
+        # Exclude protected packages from the list. This prevents those
+        # packages from being locked, unless the user is actually working on
+        # them, and explicitly lists them as top-level requirements -- those
+        # packages are not added via this code path. (sarugaku/passa#15)
+        dependencies = [
+            dependency for dependency in dependencies
+            if dependency.normalized_name not in PROTECTED_PACKAGE_NAMES
+        ]
+        if candidate.extras:
+            # HACK: If this candidate has extras, add the original candidate
+            # (same pinned version, no extras) as its dependency. This ensures
+            # that the same package with different extras (treated as distinct
+            # by the resolver) resolves to the same version. (sarugaku/passa#4)
+            dependencies.append(strip_extras(candidate))
+        candidate_key = self.identify(candidate)
+        self.fetched_dependencies[candidate_key] = {
+            self.identify(r): r for r in dependencies
+        }
+        self.requires_pythons[candidate_key] = requires_python
+        return dependencies
+
+
+class PinReuseProvider(BasicProvider):
+    """A provider that reuses preferred pins if possible.
+
+    This is used to implement "add", "remove", and "only-if-needed upgrade",
+    where already-pinned candidates in Pipfile.lock should be preferred.
+    """
+    def __init__(self, preferred_pins, *args, **kwargs):
+        super(PinReuseProvider, self).__init__(*args, **kwargs)
+        self.preferred_pins = preferred_pins
+
+    def find_matches(self, requirement):
+        candidates = super(PinReuseProvider, self).find_matches(requirement)
+        try:
+            # Add the preferred pin. Remember, the resolver prefers candidates
+            # at the end of the list, so the most preferred should be last.
+ candidates.append(self.preferred_pins[self.identify(requirement)]) + except KeyError: + pass + return candidates + + +class EagerUpgradeProvider(PinReuseProvider): + """A specialized provider to handle an "eager" upgrade strategy. + + An eager upgrade tries to upgrade not only packages specified, but also + their dependencies (recursively). This contrasts to the "only-if-needed" + default, which only promises to upgrade the specified package, and + prevents touching anything else if at all possible. + + The provider is implemented as to keep track of all dependencies of the + specified packages to upgrade, and free their pins when it has a chance. + """ + def __init__(self, tracked_names, *args, **kwargs): + super(EagerUpgradeProvider, self).__init__(*args, **kwargs) + self.tracked_names = set(tracked_names) + for name in tracked_names: + self.preferred_pins.pop(name, None) + + # HACK: Set this special flag to distinguish preferred pins from + # regular, to tell the resolver to NOT use them for tracked packages. + for pin in self.preferred_pins.values(): + pin._preferred_by_provider = True + + def is_satisfied_by(self, requirement, candidate): + # If this is a tracking package, tell the resolver out of using the + # preferred pin, and into a "normal" candidate selection process. + if (self.identify(requirement) in self.tracked_names and + getattr(candidate, "_preferred_by_provider", False)): + return False + return super(EagerUpgradeProvider, self).is_satisfied_by( + requirement, candidate, + ) + + def get_dependencies(self, candidate): + # If this package is being tracked for upgrade, remove pins of its + # dependencies, and start tracking these new packages. + dependencies = super(EagerUpgradeProvider, self).get_dependencies( + candidate, + ) + if self.identify(candidate) in self.tracked_names: + for dependency in dependencies: + name = self.identify(dependency) + self.tracked_names.add(name) + self.preferred_pins.pop(name, None) + return dependencies + + def get_preference(self, resolution, candidates, information): + # Resolve tracking packages so we have a chance to unpin them first. + name = self.identify(candidates[0]) + if name in self.tracked_names: + return -1 + return len(candidates) diff --git a/pipenv/vendor/passa/internals/specifiers.py b/pipenv/vendor/passa/internals/specifiers.py new file mode 100644 index 0000000000..75afb6adbc --- /dev/null +++ b/pipenv/vendor/passa/internals/specifiers.py @@ -0,0 +1,136 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import itertools +import operator + +from packaging.specifiers import SpecifierSet, Specifier +from vistir.misc import dedup + + +def _tuplize_version(version): + return tuple(int(x) for x in version.split(".")) + + +def _format_version(version): + return ".".join(str(i) for i in version) + + +# Prefer [x,y) ranges. 
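A freestanding sketch of the normalization that `_format_pyspec` below performs with the `REPLACE_RANGES` table: open-ended `>`/`<=` bounds on two-component Python versions are nudged into half-open `[lower, upper)` form (packaging is the only dependency; the version strings are invented):

    from packaging.specifiers import Specifier

    REPLACE_RANGES = {">": ">=", "<=": "<"}

    def normalize(spec):
        spec = Specifier(spec)
        op = REPLACE_RANGES.get(spec.operator)
        if op is None:
            return spec
        major, minor = (int(p) for p in spec.version.split(".")[:2])
        return Specifier("{0}{1}.{2}".format(op, major, minor + 1))

    assert str(normalize(">3.6")) == ">=3.7"
    assert str(normalize("<=2.7")) == "<2.8"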
+REPLACE_RANGES = {">": ">=", "<=": "<"} + + +def _format_pyspec(specifier): + if isinstance(specifier, str): + if not any(op in specifier for op in Specifier._operators.keys()): + specifier = "=={0}".format(specifier) + specifier = Specifier(specifier) + if specifier.operator == "==" and specifier.version.endswith(".*"): + specifier = Specifier("=={0}".format(specifier.version[:-2])) + try: + op = REPLACE_RANGES[specifier.operator] + except KeyError: + return specifier + version = specifier.version.replace(".*", "") + curr_tuple = _tuplize_version(version) + try: + next_tuple = (curr_tuple[0], curr_tuple[1] + 1) + except IndexError: + next_tuple = (curr_tuple[0], 1) + specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple))) + return specifier + + +def _get_specs(specset): + if isinstance(specset, Specifier): + specset = str(specset) + if isinstance(specset, str): + specset = SpecifierSet(specset.replace(".*", "")) + return [ + (spec._spec[0], _tuplize_version(spec._spec[1])) + for spec in getattr(specset, "_specs", []) + ] + + +def _group_by_op(specs): + specs = [_get_specs(x) for x in list(specs)] + flattened = [(op, version) for spec in specs for op, version in spec] + specs = sorted(flattened, key=operator.itemgetter(1)) + grouping = itertools.groupby(specs, key=operator.itemgetter(0)) + return grouping + + +def cleanup_pyspecs(specs, joiner="or"): + specs = {_format_pyspec(spec) for spec in specs} + # for != operator we want to group by version + # if all are consecutive, join as a list + results = set() + for op, versions in _group_by_op(specs): + versions = [version[1] for version in versions] + versions = sorted(dedup(versions)) + # if we are doing an or operation, we need to use the min for >= + # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6 + # if we do an AND operation we need to use MAX to be more selective + if op in (">", ">="): + if joiner == "or": + results.add((op, _format_version(min(versions)))) + else: + results.add((op, _format_version(max(versions)))) + # we use inverse logic here so we will take the max value if we are + # using OR but the min value if we are using AND + elif op in ("<=", "<"): + if joiner == "or": + results.add((op, _format_version(max(versions)))) + else: + results.add((op, _format_version(min(versions)))) + # leave these the same no matter what operator we use + elif op in ("!=", "==", "~="): + version_list = sorted( + "{0}".format(_format_version(version)) + for version in versions + ) + version = ", ".join(version_list) + if len(version_list) == 1: + results.add((op, version)) + elif op == "!=": + results.add(("not in", version)) + elif op == "==": + results.add(("in", version)) + else: + specifier = SpecifierSet(",".join(sorted( + "{0}".format(op, v) for v in version_list + )))._specs + for s in specifier: + results &= (specifier._spec[0], specifier._spec[1]) + else: + if len(version) == 1: + results.add((op, version)) + else: + specifier = SpecifierSet("{0}".format(version))._specs + for s in specifier: + results |= (specifier._spec[0], specifier._spec[1]) + return results + + +def pyspec_from_markers(marker): + if marker._markers[0][0] != 'python_version': + return + op = marker._markers[0][1].value + version = marker._markers[0][2].value + specset = set() + if op == "in": + specset.update( + Specifier("=={0}".format(v.strip())) + for v in version.split(",") + ) + elif op == "not in": + specset.update( + Specifier("!={0}".format(v.strip())) + for v in version.split(",") + ) + else: + specset.add(Specifier("".join([op, 
version])))
+    if specset:
+        return specset
+    return None
diff --git a/pipenv/vendor/passa/internals/synchronizers.py b/pipenv/vendor/passa/internals/synchronizers.py
new file mode 100644
index 0000000000..2a1f1d18b2
--- /dev/null
+++ b/pipenv/vendor/passa/internals/synchronizers.py
@@ -0,0 +1,214 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import contextlib
+import os
+import sys
+import sysconfig
+
+import pkg_resources
+
+import packaging.markers
+import packaging.version
+import requirementslib
+
+from ._pip import uninstall_requirement, EditableInstaller, WheelInstaller
+
+
+def _is_installation_local(name):
+    """Check whether the distribution is in the current Python installation.
+
+    This is used to distinguish packages seen by a virtual environment. A venv
+    may be able to see global packages, but we don't want to mess with them.
+    """
+    location = pkg_resources.working_set.by_key[name].location
+    return os.path.commonprefix([location, sys.prefix]) == sys.prefix
+
+
+def _is_up_to_date(distro, version):
+    # This is done in strings to avoid type mismatches caused by vendoring.
+    return str(version) == str(packaging.version.parse(distro.version))
+
+
+GroupCollection = collections.namedtuple("GroupCollection", [
+    "uptodate", "outdated", "noremove", "unneeded",
+])
+
+
+def _group_installed_names(packages):
+    """Group locally installed packages based on given specifications.
+
+    `packages` is a name-package mapping that is used as the baseline to
+    determine how the installed packages should be grouped.
+
+    Returns a `GroupCollection` of disjoint sets, all containing names of
+    installed packages:
+
+    * `uptodate`: These match the specifications.
+    * `outdated`: These installations are specified, but don't match the
+        specifications in `packages`.
+    * `noremove`: Left empty by this function.
+    * `unneeded`: These are installed, but not specified in `packages`.
+    """
+    groupcoll = GroupCollection(set(), set(), set(), set())
+
+    for distro in pkg_resources.working_set:
+        name = distro.key
+        try:
+            package = packages[name]
+        except KeyError:
+            groupcoll.unneeded.add(name)
+            continue
+
+        r = requirementslib.Requirement.from_pipfile(name, package)
+        if not r.is_named:
+            # Always mark non-named. I think pip does something similar?
+            groupcoll.outdated.add(name)
+        elif not _is_up_to_date(distro, r.get_version()):
+            groupcoll.outdated.add(name)
+        else:
+            groupcoll.uptodate.add(name)
+
+    return groupcoll
+
+
+@contextlib.contextmanager
+def _remove_package(name):
+    if name is None or not _is_installation_local(name):
+        yield
+        return
+    r = requirementslib.Requirement.from_line(name)
+    with uninstall_requirement(r.as_ireq(), auto_confirm=True, verbose=False):
+        yield
+
+
+def _get_packages(lockfile, default, develop):
+    # Don't need to worry about duplicates because only extras can differ.
+    # Extras don't matter because they only affect dependencies, and we
+    # don't install dependencies anyway!
+    packages = {}
+    if default:
+        packages.update(lockfile.default._data)
+    if develop:
+        packages.update(lockfile.develop._data)
+    return packages
+
+
+def _build_paths():
+    """Prepare paths for distlib.wheel.Wheel to install into.
+ """ + paths = sysconfig.get_paths() + return { + "prefix": sys.prefix, + "data": paths["data"], + "scripts": paths["scripts"], + "headers": paths["include"], + "purelib": paths["purelib"], + "platlib": paths["platlib"], + } + + +PROTECTED_FROM_CLEAN = {"setuptools", "pip"} + + +def _clean(names): + cleaned = set() + for name in names: + if name in PROTECTED_FROM_CLEAN: + continue + with _remove_package(name): + pass + cleaned.add(name) + return cleaned + + +class Synchronizer(object): + """Helper class to install packages from a project's lock file. + """ + def __init__(self, project, default, develop, clean_unneeded): + self._root = project.root # Only for repr. + self.packages = _get_packages(project.lockfile, default, develop) + self.sources = project.lockfile.meta.sources._data + self.paths = _build_paths() + self.clean_unneeded = clean_unneeded + + def __repr__(self): + return "<{0} @ {1!r}>".format(type(self).__name__, self._root) + + def sync(self): + groupcoll = _group_installed_names(self.packages) + + installed = set() + updated = set() + cleaned = set() + + # TODO: Show a prompt to confirm cleaning. We will need to implement a + # reporter pattern for this as well. + if self.clean_unneeded: + names = _clean(groupcoll.unneeded) + cleaned.update(names) + + # TODO: Specify installation order? (pypa/pipenv#2274) + installers = [] + for name, package in self.packages.items(): + r = requirementslib.Requirement.from_pipfile(name, package) + name = r.normalized_name + if name in groupcoll.uptodate: + continue + markers = r.markers + if markers and not packaging.markers.Marker(markers).evaluate(): + continue + r.markers = None + if r.editable: + installer = EditableInstaller(r) + else: + installer = WheelInstaller(r, self.sources, self.paths) + try: + installer.prepare() + except Exception as e: + if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): + raise + print("failed to prepare {0!r}: {1}".format( + r.as_line(include_hashes=False), e, + )) + else: + installers.append((name, installer)) + + for name, installer in installers: + if name in groupcoll.outdated: + name_to_remove = name + else: + name_to_remove = None + try: + with _remove_package(name_to_remove): + installer.install() + except Exception as e: + if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): + raise + print("failed to install {0!r}: {1}".format( + r.as_line(include_hashes=False), e, + )) + continue + if name in groupcoll.outdated or name in groupcoll.noremove: + updated.add(name) + else: + installed.add(name) + + return installed, updated, cleaned + + +class Cleaner(object): + """Helper class to clean packages not in a project's lock file. + """ + def __init__(self, project, default, develop): + self._root = project.root # Only for repr. 
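`sync()` above skips any locked entry whose marker does not apply to the running interpreter; the check is plain packaging machinery:

    import packaging.markers

    marker = packaging.markers.Marker("python_version < '2.0'")
    assert marker.evaluate() is False  # sync() would skip such a package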
+        self.packages = _get_packages(project.lockfile, default, develop)
+
+    def __repr__(self):
+        return "<{0} @ {1!r}>".format(type(self).__name__, self._root)
+
+    def clean(self):
+        groupcoll = _group_installed_names(self.packages)
+        _clean(groupcoll.unneeded)
+        return groupcoll.unneeded
diff --git a/pipenv/vendor/passa/internals/traces.py b/pipenv/vendor/passa/internals/traces.py
new file mode 100644
index 0000000000..9715db975b
--- /dev/null
+++ b/pipenv/vendor/passa/internals/traces.py
@@ -0,0 +1,40 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+
+def _trace_visit_vertex(graph, current, target, visited, path, paths):
+    if current == target:
+        paths.append(path)
+        return
+    for v in graph.iter_children(current):
+        if v == current or v in visited:
+            continue
+        next_path = path + [current]
+        next_visited = visited | {current}
+        _trace_visit_vertex(graph, v, target, next_visited, next_path, paths)
+
+
+def trace_graph(graph):
+    """Build a collection of "traces" for each package.
+
+    A trace is a list of names that eventually leads to the package. For
+    example, if A and B are root dependencies, A depends on C and D, B
+    depends on C, and C depends on D, the return value would be like::
+
+        {
+            None: [],
+            "A": [[None]],
+            "B": [[None]],
+            "C": [[None, "A"], [None, "B"]],
+            "D": [[None, "A", "C"], [None, "B", "C"], [None, "A"]],
+        }
+    """
+    result = {None: []}
+    for vertex in graph:
+        result[vertex] = []
+        for root in graph.iter_children(None):
+            paths = []
+            _trace_visit_vertex(graph, root, vertex, {None}, [None], paths)
+            result[vertex].extend(paths)
+    return result
diff --git a/pipenv/vendor/passa/internals/utils.py b/pipenv/vendor/passa/internals/utils.py
new file mode 100644
index 0000000000..d23a10c789
--- /dev/null
+++ b/pipenv/vendor/passa/internals/utils.py
@@ -0,0 +1,106 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+
+def identify_requirment(r):
+    """Produce an identifier for a requirement to use in the resolver.
+
+    Note that we are treating the same package with different extras as
+    distinct. This allows semantics like "I only want this extra in
+    development, not production".
+
+    This also makes the resolver's implementation much simpler, at the minor
+    cost of possibly needing a few extra resolution steps if we happen to
+    have the same package appearing multiple times.
+    """
+    return "{0}{1}".format(r.normalized_name, r.extras_as_pip)
+
+
+def get_pinned_version(ireq):
+    """Get the pinned version of an InstallRequirement.
+
+    An InstallRequirement is considered pinned if:
+
+    - It is not editable
+    - It has exactly one specifier
+    - That specifier is "=="
+    - The version does not contain a wildcard
+
+    Examples:
+        django==1.8   # pinned
+        django>1.8    # NOT pinned
+        django~=1.8   # NOT pinned
+        django==1.*   # NOT pinned
+
+    Raises `TypeError` if the input is not a valid InstallRequirement, or
+    `ValueError` if the InstallRequirement is not pinned.
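The pinning rule spelled out in the docstring above can be restated with packaging alone; a hypothetical `looks_pinned` helper for illustration (it checks a bare specifier string rather than a full InstallRequirement):

    from packaging.specifiers import SpecifierSet

    def looks_pinned(spec_str):
        specs = list(SpecifierSet(spec_str))
        return (len(specs) == 1
                and specs[0].operator in ("==", "===")
                and not specs[0].version.endswith(".*"))

    assert looks_pinned("==1.8")
    assert not looks_pinned(">1.8")
    assert not looks_pinned("==1.*")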
+ """ + try: + specifier = ireq.specifier + except AttributeError: + raise TypeError("Expected InstallRequirement, not {}".format( + type(ireq).__name__, + )) + + if ireq.editable: + raise ValueError("InstallRequirement is editable") + if not specifier: + raise ValueError("InstallRequirement has no version specification") + if len(specifier._specs) != 1: + raise ValueError("InstallRequirement has multiple specifications") + + op, version = next(iter(specifier._specs))._spec + if op not in ('==', '===') or version.endswith('.*'): + raise ValueError("InstallRequirement not pinned (is {0!r})".format( + op + version, + )) + + return version + + +def is_pinned(ireq): + """Returns whether an InstallRequirement is a "pinned" requirement. + + An InstallRequirement is considered pinned if: + + - Is not editable + - It has exactly one specifier + - That specifier is "==" + - The version does not contain a wildcard + + Examples: + django==1.8 # pinned + django>1.8 # NOT pinned + django~=1.8 # NOT pinned + django==1.* # NOT pinned + """ + try: + get_pinned_version(ireq) + except (TypeError, ValueError): + return False + return True + + +def filter_sources(requirement, sources): + """Returns a filtered list of sources for this requirement. + + This considers the index specified by the requirement, and returns only + matching source entries if there is at least one. + """ + if not sources or not requirement.index: + return sources + filtered_sources = [ + source for source in sources + if source.get("name") == requirement.index + ] + return filtered_sources or sources + + +def strip_extras(requirement): + """Returns a new requirement object with extras removed. + """ + line = requirement.as_line() + new = type(requirement).from_line(line) + new.extras = None + return new diff --git a/pipenv/vendor/passa/lockers.py b/pipenv/vendor/passa/lockers.py new file mode 100644 index 0000000000..4ab4cc3bda --- /dev/null +++ b/pipenv/vendor/passa/lockers.py @@ -0,0 +1,182 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import itertools + +import plette +import requirementslib +import resolvelib +import vistir + +from .caches import HashCache +from .hashes import get_hashes +from .metadata import set_metadata +from .providers import BasicProvider, EagerUpgradeProvider, PinReuseProvider +from .reporters import StdOutReporter +from .traces import trace_graph +from .utils import identify_requirment + + +def _get_requirements(model, section_name): + """Produce a mapping of identifier: requirement from the section. + """ + if not model: + return {} + return {identify_requirment(r): r for r in ( + requirementslib.Requirement.from_pipfile(name, package._data) + for name, package in model.get(section_name, {}).items() + )} + + +def _iter_derived_entries(state, traces, names): + """Produce a mapping containing all candidates derived from `names`. + + `name` should provide a collection of requirement identifications from + a section (i.e. `packages` or `dev-packages`). This function uses `trace` + to filter out candidates in the state that are present because of an entry + in that collection. 
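The route filter described above can be followed with a toy trace mapping (shapes as produced by `trace_graph`; the package names are invented):

    traces = {
        "requests": [[None]],          # a top-level requirement
        "idna": [[None, "requests"]],  # pulled in via requests
        "pytest": [[None]],            # top-level, but in another section
    }
    names = {"requests"}
    derived = {
        name for name in traces
        if name in names
        or names & {route[1] for route in traces[name] if len(route) > 1}
    }
    assert derived == {"requests", "idna"}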
+ """ + if not names: + return + names = set(names) + for name, requirement in state.mapping.items(): + routes = {trace[1] for trace in traces[name] if len(trace) > 1} + if name not in names and not (names & routes): + continue + yield ( + requirement.normalized_name, + next(iter(requirement.as_pipfile().values())) + ) + + +class AbstractLocker(object): + """Helper class to produce a new lock file for a project. + + This is not intended for instantiation. You should use one of its concrete + subclasses instead. The class contains logic to: + + * Prepare a project for locking + * Perform the actually resolver invocation + * Convert resolver output into lock file format + * Update the project to have the new lock file + """ + def __init__(self, project): + self.project = project + self.default_requirements = _get_requirements( + project.pipfile, "packages", + ) + self.develop_requirements = _get_requirements( + project.pipfile, "dev-packages", + ) + + # This comprehension dance ensures we merge packages from both + # sections, and definitions in the default section win. + self.requirements = {k: r for k, r in itertools.chain( + self.develop_requirements.items(), + self.default_requirements.items(), + )}.values() + + self.sources = [s._data.copy() for s in project.pipfile.sources] + self.allow_prereleases = bool( + project.pipfile.get("pipenv", {}).get("allow_prereleases", False), + ) + + def __repr__(self): + return "<{0} @ {1!r}>".format(type(self).__name__, self.project.root) + + def get_provider(self): + raise NotImplementedError + + def get_reporter(self): + # TODO: Build SpinnerReporter, and use this only in verbose mode. + return StdOutReporter(self.requirements) + + def lock(self): + """Lock specified (abstract) requirements into (concrete) candidates. + + The locking procedure consists of four stages: + + * Resolve versions and dependency graph (powered by ResolveLib). + * Walk the graph to determine "why" each candidate came to be, i.e. + what top-level requirements result in a given candidate. + * Populate hashes for resolved candidates. + * Populate markers based on dependency specifications of each + candidate, and the dependency graph. + """ + provider = self.get_provider() + reporter = self.get_reporter() + resolver = resolvelib.Resolver(provider, reporter) + + with vistir.cd(self.project.root): + state = resolver.resolve(self.requirements) + + traces = trace_graph(state.graph) + + hash_cache = HashCache() + for r in state.mapping.values(): + if not r.hashes: + r.hashes = get_hashes(hash_cache, r) + + set_metadata( + state.mapping, traces, + provider.fetched_dependencies, provider.requires_pythons, + ) + + lockfile = plette.Lockfile.with_meta_from(self.project.pipfile) + lockfile["default"] = dict(_iter_derived_entries( + state, traces, self.default_requirements, + )) + lockfile["develop"] = dict(_iter_derived_entries( + state, traces, self.develop_requirements, + )) + self.project.lockfile = lockfile + + +class BasicLocker(AbstractLocker): + """Basic concrete locker. + + This takes a project, generates a lock file from its Pipfile, and sets + the lock file property to the project. + """ + def get_provider(self): + return BasicProvider( + self.requirements, self.sources, self.allow_prereleases, + ) + + +class PinReuseLocker(AbstractLocker): + """A specialized locker to handle re-locking based on existing pins. + + See :class:`passa.providers.PinReuseProvider` for more information. 
+ """ + def __init__(self, project): + super(PinReuseLocker, self).__init__(project) + pins = _get_requirements(project.lockfile, "develop") + pins.update(_get_requirements(project.lockfile, "default")) + for pin in pins.values(): + pin.markers = None + self.preferred_pins = pins + + def get_provider(self): + return PinReuseProvider( + self.preferred_pins, + self.requirements, self.sources, self.allow_prereleases, + ) + + +class EagerUpgradeLocker(PinReuseLocker): + """A specialized locker to handle the "eager" upgrade strategy. + + See :class:`passa.providers.EagerUpgradeProvider` for more + information. + """ + def __init__(self, tracked_names, *args, **kwargs): + super(EagerUpgradeLocker, self).__init__(*args, **kwargs) + self.tracked_names = tracked_names + + def get_provider(self): + return EagerUpgradeProvider( + self.tracked_names, self.preferred_pins, + self.requirements, self.sources, self.allow_prereleases, + ) diff --git a/pipenv/vendor/passa/locking.py b/pipenv/vendor/passa/locking.py new file mode 100644 index 0000000000..e4b6ced57f --- /dev/null +++ b/pipenv/vendor/passa/locking.py @@ -0,0 +1,105 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import itertools + +from plette import Lockfile +from requirementslib import Requirement +from resolvelib import Resolver + +from .caches import HashCache +from .hashes import get_hashes +from .metadata import set_metadata +from .providers import RequirementsLibProvider +from .reporters import StdOutReporter +from .traces import trace_graph +from .utils import identify_requirment + + +def resolve_requirements(requirements, sources, pins, allow_pre): + """Lock specified (abstract) requirements into (concrete) candidates. + + The locking procedure consists of four stages: + + * Resolve versions and dependency graph (powered by ResolveLib). + * Walk the graph to determine "why" each candidate came to be, i.e. what + top-level requirements result in a given candidate. + * Populate hashes for resolved candidates. + * Populate markers based on dependency specifications of each candidate, + and the dependency graph. + """ + provider = RequirementsLibProvider(requirements, sources, pins, allow_pre) + reporter = StdOutReporter(requirements) + resolver = Resolver(provider, reporter) + + state = resolver.resolve(requirements) + traces = trace_graph(state.graph) + + hash_cache = HashCache() + for r in state.mapping.values(): + if not r.hashes: + r.hashes = get_hashes(hash_cache, r) + + set_metadata( + state.mapping, traces, + provider.fetched_dependencies, provider.requires_pythons, + ) + return state, traces + + +def _get_requirements(pipfile, section_name): + """Produce a mapping of identifier: requirement from the section. + """ + return {identify_requirment(r): r for r in ( + Requirement.from_pipfile(name, package._data) + for name, package in pipfile.get(section_name, {}).items() + )} + + +def _get_derived_entries(state, traces, names): + """Produce a mapping containing all candidates derived from `names`. + + `name` should provide a collection of requirement identifications from + a section (i.e. `packages` or `dev-packages`). This function uses `trace` + to filter out candidates in the state that are present because of an entry + in that collection. 
+ """ + if not names: + return {} + return_map = {} + for req_name_from_state, req in state.mapping.items(): + req_traces = [trace[1] for trace in traces[req_name_from_state] if len(trace) > 1] + if req_name_from_state in names or len(set(names) & set(req_traces)): + return_map[req.normalized_name] = next(iter(req.as_pipfile().values())) + return return_map + + +def build_lockfile(pipfile, lockfile): + default_reqs = _get_requirements(pipfile, "packages") + develop_reqs = _get_requirements(pipfile, "dev-packages") + + pins = {} + if lockfile: + pins = _get_requirements(lockfile, "develop") + pins.update(_get_requirements(lockfile, "default")) + + # This comprehension dance ensures we merge packages from both + # sections, and definitions in the default section win. + requirements = {k: r for k, r in itertools.chain( + develop_reqs.items(), default_reqs.items(), + )}.values() + + sources = [s._data.copy() for s in pipfile.sources] + try: + allow_prereleases = bool(pipfile["pipenv"]["allow_prereleases"]) + except (KeyError, TypeError): + allow_prereleases = False + state, traces = resolve_requirements( + requirements, sources, pins, allow_prereleases, + ) + + new_lock = Lockfile.with_meta_from(pipfile) + new_lock["default"] = _get_derived_entries(state, traces, default_reqs) + new_lock["develop"] = _get_derived_entries(state, traces, develop_reqs) + return new_lock diff --git a/pipenv/vendor/passa/markers.py b/pipenv/vendor/passa/markers.py new file mode 100644 index 0000000000..5f6f37d6f9 --- /dev/null +++ b/pipenv/vendor/passa/markers.py @@ -0,0 +1,228 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import itertools +import operator + +import vistir + +from packaging.specifiers import SpecifierSet, Specifier +from packaging.markers import Marker + + +PYTHON_BOUNDARIES = {2: 7, 3: 9} + + +def _strip_extra(elements): + """Remove the "extra == ..." operands from the list. + + This is not a comprehensive implementation, but relies on an important + characteristic of metadata generation: The "extra == ..." operand is always + associated with an "and" operator. This means that we can simply remove the + operand and the "and" operator associated with it. + """ + extra_indexes = [] + for i, element in enumerate(elements): + if isinstance(element, list): + cancelled = _strip_extra(element) + if cancelled: + extra_indexes.append(i) + elif isinstance(element, tuple) and element[0].value == "extra": + extra_indexes.append(i) + for i in reversed(extra_indexes): + del elements[i] + if i > 0 and elements[i - 1] == "and": + # Remove the "and" before it. + del elements[i - 1] + elif elements: + # This shouldn't ever happen, but is included for completeness. + # If there is not an "and" before this element, try to remove the + # operator after it. + del elements[0] + return (not elements) + + +def get_without_extra(marker): + """Build a new marker without the `extra == ...` part. + + The implementation relies very deep into packaging's internals, but I don't + have a better way now (except implementing the whole thing myself). + + This could return `None` if the `extra == ...` part is the only one in the + input marker. + """ + # TODO: Why is this very deep in the internals? Why is a better solution + # implementing it yourself when someone is already maintaining a codebase + # for this? It's literally a grammar implementation that is required to + # meet the demands of a pep... 
-d + if not marker: + return None + marker = Marker(str(marker)) + elements = marker._markers + _strip_extra(elements) + if elements: + return marker + return None + + +def _markers_collect_extras(markers, collection): + # Optimization: the marker element is usually appended at the end. + for el in reversed(markers): + if (isinstance(el, tuple) and + el[0].value == "extra" and + el[1].value == "=="): + collection.add(el[2].value) + elif isinstance(el, list): + _markers_collect_extras(el, collection) + + +def get_contained_extras(marker): + """Collect "extra == ..." operands from a marker. + + Returns a list of str. Each str is a speficied extra in this marker. + """ + if not marker: + return set() + marker = Marker(str(marker)) + extras = set() + _markers_collect_extras(marker._markers, extras) + return extras + + +def _markers_contains_extra(markers): + # Optimization: the marker element is usually appended at the end. + for element in reversed(markers): + if isinstance(element, tuple) and element[0].value == "extra": + return True + elif isinstance(element, list): + if _markers_contains_extra(element): + return True + return False + + +def contains_extra(marker): + """Check whehter a marker contains an "extra == ..." operand. + """ + if not marker: + return False + marker = Marker(str(marker)) + return _markers_contains_extra(marker._markers) + + +def format_pyspec(specifier): + if isinstance(specifier, str): + if not any(operator in specifier for operator in Specifier._operators.keys()): + new_op = "==" + new_version = specifier + return Specifier("{0}{1}".format(new_op, new_version)) + version = specifier._coerce_version(specifier.version.replace(".*", "")) + version_tuple = version._version.release + if specifier.operator in (">", "<="): + # Prefer to always pick the operator for version n+1 + if version_tuple[1] < PYTHON_BOUNDARIES.get(version_tuple[0], 0): + if specifier.operator == ">": + new_op = ">=" + else: + new_op = "<" + new_version = (version_tuple[0], version_tuple[1] + 1) + specifier = Specifier("{0}{1}".format(new_op, version_to_str(new_version))) + return specifier + + +def make_version_tuple(version): + return tuple([int(x) for x in version.split(".")]) + + +def version_to_str(version): + return ".".join([str(i) for i in version]) + + +def get_specs(specset): + if isinstance(specset, Specifier): + specset = str(specset) + if isinstance(specset, str): + specset = SpecifierSet(specset.replace(".*", "")) + + specs = getattr(specset, "_specs", None) + return [(spec._spec[0], make_version_tuple(spec._spec[1])) for spec in list(specs)] + + +def group_by_version(versions): + versions = sorted(map(lambda x: make_version_tuple(x))) + grouping = itertools.groupby(versions, key=operator.itemgetter(0)) + return grouping + + +def group_by_op(specs): + specs = [get_specs(x) for x in list(specs)] + flattened = [(op, version) for spec in specs for op, version in spec] + specs = sorted(flattened, key=operator.itemgetter(1)) + grouping = itertools.groupby(specs, key=operator.itemgetter(0)) + return grouping + + +def marker_to_spec(marker): + if marker._markers[0][0] != 'python_version': + return + operator = marker._markers[0][1].value + version = marker._markers[0][2].value + specset = set() + if operator in ("in", "not in"): + op = "==" if operator == "in" else "!=" + specset |= set([Specifier("{0}{1}".format(op, v.strip())) for v in version.split(",")]) + else: + spec = Specifier("".join([operator, version])) + specset.add(spec) + if specset: + return specset + return None + + +def 
+def cleanup_specs(specs, operator="or"):
+    specs = {format_pyspec(spec) for spec in specs}
+    # for the != operator we want to group by version
+    # if all are consecutive, join as a list
+    results = set()
+    for op, versions in group_by_op(specs):
+        versions = [version[1] for version in versions]
+        versions = sorted(vistir.misc.dedup(versions))
+        # if we are doing an or operation, we need to use the min for >=
+        # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6
+        # if we do an AND operation we need to use MAX to be more selective
+        if op in (">", ">="):
+            if operator == "or":
+                results.add((op, version_to_str(min(versions))))
+            else:
+                results.add((op, version_to_str(max(versions))))
+        # we use inverse logic here so we will take the max value if we are
+        # using OR, but the min value if we are using AND
+        elif op in ("<=", "<"):
+            if operator == "or":
+                results.add((op, version_to_str(max(versions))))
+            else:
+                results.add((op, version_to_str(min(versions))))
+        # leave these the same no matter what operator we use
+        elif op in ("!=", "==", "~="):
+            version_list = sorted(["{0}".format(version_to_str(version)) for version in versions])
+            version = ", ".join(version_list)
+            if len(version_list) == 1:
+                results.add((op, version))
+            else:
+                if op == "!=":
+                    results.add(("not in", version))
+                elif op == "==":
+                    results.add(("in", version))
+                else:
+                    specifier = SpecifierSet(",".join(
+                        "{0}{1}".format(op, v) for v in version_list
+                    ))._specs
+                    for s in specifier:
+                        results.add((s._spec[0], s._spec[1]))
+        else:
+            if len(versions) == 1:
+                results.add((op, version_to_str(versions[0])))
+            else:
+                specifier = SpecifierSet(",".join(
+                    "{0}{1}".format(op, version_to_str(v)) for v in versions
+                ))._specs
+                for s in specifier:
+                    results.add((s._spec[0], s._spec[1]))
+    return results
diff --git a/pipenv/vendor/passa/metadata.py b/pipenv/vendor/passa/metadata.py
new file mode 100644
index 0000000000..312691a19c
--- /dev/null
+++ b/pipenv/vendor/passa/metadata.py
@@ -0,0 +1,169 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import copy
+import itertools
+
+import packaging.markers
+import packaging.specifiers
+import vistir
+import vistir.misc
+
+from .markers import get_without_extra, cleanup_specs, marker_to_spec
+
+
+def dedup_markers(s):
+    # TODO: Implement better logic.
+    deduped = sorted(vistir.misc.dedup(s))
+    return deduped
+
+
+class MetaSet(object):
+    """Representation of a "metadata set".
+
+    This holds multiple metadata representations. Each metadata representation
+    includes a marker, and a specifier set of Python versions required.
+    """
+    def __init__(self):
+        self.markerset = frozenset()
+        self.pyspecset = packaging.specifiers.SpecifierSet()
+
+    def __repr__(self):
+        return "MetaSet(markerset={0!r}, pyspecset={1!r})".format(
+            ",".join(sorted(self.markerset)), str(self.pyspecset),
+        )
+
+    def __str__(self):
+        pyspecs = set()
+        markerset = set()
+        for m in self.markerset:
+            py_marker = marker_to_spec(packaging.markers.Marker(m))
+            if py_marker:
+                pyspecs.update(py_marker)
+            else:
+                markerset.add(m)
+        if pyspecs:
+            self.pyspecset._specs |= pyspecs
+            self.markerset = frozenset(markerset)
+        return " and ".join(dedup_markers(itertools.chain(
+            # Make sure to always use the same quotes so we can dedup properly.
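+            # (Illustrative: a markerset {"os_name == 'nt'"} with pyspecset
+            # ">=3.6" renders as "os_name == 'nt' and python_version >= '3.6'".)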
+            (
+                "({0})".format(ms) if " or " in ms else ms
+                for ms in (str(m).replace('"', "'") for m in self.markerset)
+            ),
+            (
+                "python_version {0[0]} '{0[1]}'".format(spec)
+                for spec in cleanup_specs(self.pyspecset)
+            ),
+        )))
+
+    def __bool__(self):
+        return bool(self.markerset or self.pyspecset)
+
+    def __nonzero__(self):  # Python 2.
+        return self.__bool__()
+
+    def __or__(self, pair):
+        marker, specset = pair
+        markerset = set(self.markerset)
+        pyspec_markers = set()
+        if marker:
+            pyspec_markers = marker_to_spec(marker)
+            if not pyspec_markers:
+                markerset.add(str(marker))
+            else:
+                specset._specs |= pyspec_markers
+        metaset = MetaSet()
+        metaset.markerset = frozenset(markerset)
+        # TODO: Implement some logic to clean up dups like '3.0.*' and '3.0'.
+        metaset.pyspecset &= self.pyspecset & specset
+        return metaset
+
+
+def _build_metasets(dependencies, pythons, key, trace, all_metasets):
+    all_parent_metasets = []
+    for route in trace:
+        parent = route[-1]
+        try:
+            parent_metasets = all_metasets[parent]
+        except KeyError:    # Parent not calculated yet. Wait for it.
+            return
+        all_parent_metasets.append((parent, parent_metasets))
+
+    metaset_iters = []
+    for parent, parent_metasets in all_parent_metasets:
+        r = dependencies[parent][key]
+        python = pythons[key]
+        metaset = (
+            get_without_extra(r.markers),
+            packaging.specifiers.SpecifierSet(python),
+        )
+        metaset_iters.append(
+            parent_metaset | metaset
+            for parent_metaset in parent_metasets
+        )
+    return list(itertools.chain.from_iterable(metaset_iters))
+
+
+def _calculate_metasets_mapping(dependencies, pythons, traces):
+    all_metasets = {None: [MetaSet()]}
+
+    del traces[None]
+    while traces:
+        new_metasets = {}
+        for key, trace in traces.items():
+            assert key not in all_metasets, key  # Sanity check for debug.
+            metasets = _build_metasets(
+                dependencies, pythons, key, trace, all_metasets,
+            )
+            if metasets is None:
+                continue
+            new_metasets[key] = metasets
+        if not new_metasets:
+            break   # No progress? Deadlocked. Give up.
+        all_metasets.update(new_metasets)
+        for key in new_metasets:
+            del traces[key]
+
+    return all_metasets
+
+
+def _format_metasets(metasets):
+    # If there is an unconditional route, this needs to be unconditional.
+    if not metasets or not all(metasets):
+        return None
+
+    # This extra str(Marker()) call helps simplify the expression.
+    return str(packaging.markers.Marker(" or ".join(
+        "({0})".format(s) if " and " in s else s
+        for s in dedup_markers(str(metaset) for metaset in metasets
+                               if metaset)
+    )))
+
+
+def set_metadata(candidates, traces, dependencies, pythons):
+    """Add "metadata" to candidates based on the dependency tree.
+
+    Metadata for a candidate includes markers and a specifier for Python
+    version requirements.
+
+    :param candidates: A key-candidate mapping. Candidates in the mapping will
+        have their markers set.
+    :param traces: A graph trace (produced by `traces.trace_graph`) providing
+        information about dependency relationships between candidates.
+    :param dependencies: A key-collection mapping containing what dependencies
+        each candidate in `candidates` requested.
+    :param pythons: A key-str mapping containing Requires-Python information
+        of each candidate.
+
+    Keys in mappings and entries in the trace are identifiers of a package, as
+    implemented by the `identify` method of the resolver's provider.
+
+    The candidates are modified in-place.
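+
+    A minimal usage sketch (names illustrative)::
+
+        set_metadata(candidates, traces, dependencies, pythons)
+        # candidates["colorama"].markers may now be "os_name == 'nt'"
+        # if every route to it is Windows-only.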
+ """ + metasets_mapping = _calculate_metasets_mapping( + dependencies, pythons, copy.deepcopy(traces), + ) + for key, candidate in candidates.items(): + candidate.markers = _format_metasets(metasets_mapping[key]) diff --git a/pipenv/vendor/passa/operations/__init__.py b/pipenv/vendor/passa/operations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/passa/operations/_utils.py b/pipenv/vendor/passa/operations/_utils.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/passa/operations/lock.py b/pipenv/vendor/passa/operations/lock.py new file mode 100644 index 0000000000..a68d0b7d99 --- /dev/null +++ b/pipenv/vendor/passa/operations/lock.py @@ -0,0 +1,28 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +from resolvelib import NoVersionsAvailable, ResolutionImpossible + +from passa.reporters import print_requirement + + +def lock(locker): + success = False + try: + locker.lock() + except NoVersionsAvailable as e: + print("\nCANNOT RESOLVE. NO CANDIDATES FOUND FOR:") + print("{:>40}".format(e.requirement.as_line(include_hashes=False))) + if e.parent: + line = e.parent.as_line(include_hashes=False) + print("{:>41}".format("(from {})".format(line))) + else: + print("{:>41}".format("(user)")) + except ResolutionImpossible as e: + print("\nCANNOT RESOLVE.\nOFFENDING REQUIREMENTS:") + for r in e.requirements: + print_requirement(r) + else: + success = True + return success diff --git a/pipenv/vendor/passa/operations/sync.py b/pipenv/vendor/passa/operations/sync.py new file mode 100644 index 0000000000..3014e8d963 --- /dev/null +++ b/pipenv/vendor/passa/operations/sync.py @@ -0,0 +1,23 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + + +def sync(syncer): + print("Starting synchronization") + installed, updated, cleaned = syncer.sync() + if cleaned: + print("Uninstalled: {}".format(", ".join(sorted(cleaned)))) + if installed: + print("Installed: {}".format(", ".join(sorted(installed)))) + if updated: + print("Updated: {}".format(", ".join(sorted(updated)))) + return True + + +def clean(cleaner): + print("Cleaning") + cleaned = cleaner.clean() + if cleaned: + print("Uninstalled: {}".format(", ".join(sorted(cleaned)))) + return True diff --git a/pipenv/vendor/passa/projects.py b/pipenv/vendor/passa/projects.py new file mode 100644 index 0000000000..79e71bb1b2 --- /dev/null +++ b/pipenv/vendor/passa/projects.py @@ -0,0 +1,235 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import collections +import io +import os + +import attr +import packaging.markers +import packaging.utils +import plette +import plette.models +import six +import tomlkit + + +SectionDifference = collections.namedtuple("SectionDifference", [ + "inthis", "inthat", +]) +FileDifference = collections.namedtuple("FileDifference", [ + "default", "develop", +]) + + +def _are_pipfile_entries_equal(a, b): + a = {k: v for k, v in a.items() if k not in ("markers", "hashes", "hash")} + b = {k: v for k, v in b.items() if k not in ("markers", "hashes", "hash")} + if a != b: + return False + try: + marker_eval_a = packaging.markers.Marker(a["markers"]).evaluate() + except (AttributeError, KeyError, TypeError, ValueError): + marker_eval_a = True + try: + marker_eval_b = packaging.markers.Marker(b["markers"]).evaluate() + except (AttributeError, KeyError, TypeError, ValueError): + marker_eval_b = True + return marker_eval_a == 
marker_eval_b + + +DEFAULT_NEWLINES = "\n" + + +def preferred_newlines(f): + if isinstance(f.newlines, six.text_type): + return f.newlines + return DEFAULT_NEWLINES + + +@attr.s +class ProjectFile(object): + """A file in the Pipfile project. + """ + location = attr.ib() + line_ending = attr.ib() + model = attr.ib() + + @classmethod + def read(cls, location, model_cls, invalid_ok=False): + try: + with io.open(location, encoding="utf-8") as f: + model = model_cls.load(f) + line_ending = preferred_newlines(f) + except Exception: + if not invalid_ok: + raise + model = None + line_ending = DEFAULT_NEWLINES + return cls(location=location, line_ending=line_ending, model=model) + + def write(self): + kwargs = {"encoding": "utf-8", "newline": self.line_ending} + with io.open(self.location, "w", **kwargs) as f: + self.model.dump(f) + + def dumps(self): + strio = six.StringIO() + self.model.dump(strio) + return strio.getvalue() + + +@attr.s +class Project(object): + + root = attr.ib() + _p = attr.ib(init=False) + _l = attr.ib(init=False) + + def __attrs_post_init__(self): + self.root = root = os.path.abspath(self.root) + self._p = ProjectFile.read( + os.path.join(root, "Pipfile"), + plette.Pipfile, + ) + self._l = ProjectFile.read( + os.path.join(root, "Pipfile.lock"), + plette.Lockfile, + invalid_ok=True, + ) + + @property + def pipfile(self): + return self._p.model + + @property + def pipfile_location(self): + return self._p.location + + @property + def lockfile(self): + return self._l.model + + @property + def lockfile_location(self): + return self._l.location + + @lockfile.setter + def lockfile(self, new): + self._l.model = new + + def is_synced(self): + return self.lockfile and self.lockfile.is_up_to_date(self.pipfile) + + def _get_pipfile_section(self, develop, insert=True): + name = "dev-packages" if develop else "packages" + try: + section = self.pipfile[name] + except KeyError: + section = plette.models.PackageCollection(tomlkit.table()) + if insert: + self.pipfile[name] = section + return section + + def contains_key_in_pipfile(self, key): + sections = [ + self._get_pipfile_section(develop=False, insert=False), + self._get_pipfile_section(develop=True, insert=False), + ] + return any( + (packaging.utils.canonicalize_name(name) == + packaging.utils.canonicalize_name(key)) + for section in sections + for name in section + ) + + def add_line_to_pipfile(self, line, develop): + from requirementslib import Requirement + requirement = Requirement.from_line(line) + section = self._get_pipfile_section(develop=develop) + key = requirement.normalized_name + entry = next(iter(requirement.as_pipfile().values())) + if isinstance(entry, dict): + # HACK: TOMLKit prefers to expand tables by default, but we + # always want inline tables here. Also tomlkit.inline_table + # does not have `update()`. 
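+            # (Illustrative: {"version": "*", "extras": ["sock"]} should
+            # serialize as `name = {version = "*", extras = ["sock"]}`.)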
+            table = tomlkit.inline_table()
+            for k, v in entry.items():
+                table[k] = v
+            entry = table
+        section[key] = entry
+
+    def remove_keys_from_pipfile(self, keys, default, develop):
+        keys = {packaging.utils.canonicalize_name(key) for key in keys}
+        sections = []
+        if default:
+            sections.append(self._get_pipfile_section(
+                develop=False, insert=False,
+            ))
+        if develop:
+            sections.append(self._get_pipfile_section(
+                develop=True, insert=False,
+            ))
+        for section in sections:
+            removals = set()
+            for name in section:
+                if packaging.utils.canonicalize_name(name) in keys:
+                    removals.add(name)
+            for key in removals:
+                del section._data[key]
+
+    def remove_keys_from_lockfile(self, keys):
+        keys = {packaging.utils.canonicalize_name(key) for key in keys}
+        removed = False
+        for section_name in ("default", "develop"):
+            try:
+                section = self.lockfile[section_name]
+            except KeyError:
+                continue
+            removals = set()
+            for name in section:
+                if packaging.utils.canonicalize_name(name) in keys:
+                    removals.add(name)
+            removed = removed or bool(removals)
+            for key in removals:
+                del section._data[key]
+
+        if removed:
+            # HACK: The lock file no longer represents the Pipfile at this
+            # point. Set the hash to an arbitrary invalid value.
+            self.lockfile.meta.hash = plette.models.Hash({"__invalid__": ""})
+
+    def difference_lockfile(self, lockfile):
+        """Generate a difference between the current and given lockfiles.
+
+        Returns a 2-tuple containing differences in the default and develop
+        sections.
+
+        Each element is a 2-tuple of dicts. The first, `inthis`, contains
+        entries only present in the current lockfile; the second, `inthat`,
+        contains entries only present in the given one.
+
+        If a key exists in both this and that, but the values differ, the key
+        is present in both dicts, pointing to values from each file.
+        """
+        diff_data = {
+            "default": SectionDifference({}, {}),
+            "develop": SectionDifference({}, {}),
+        }
+        for section_name, section_diff in diff_data.items():
+            this = self.lockfile[section_name]._data
+            that = lockfile[section_name]._data
+            for key, this_value in this.items():
+                try:
+                    that_value = that[key]
+                except KeyError:
+                    section_diff.inthis[key] = this_value
+                    continue
+                if not _are_pipfile_entries_equal(this_value, that_value):
+                    section_diff.inthis[key] = this_value
+                    section_diff.inthat[key] = that_value
+            for key, that_value in that.items():
+                if key not in this:
+                    section_diff.inthat[key] = that_value
+        return FileDifference(**diff_data)
diff --git a/pipenv/vendor/passa/providers.py b/pipenv/vendor/passa/providers.py
new file mode 100644
index 0000000000..7dfa3306dd
--- /dev/null
+++ b/pipenv/vendor/passa/providers.py
@@ -0,0 +1,167 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+
+import resolvelib
+
+from .candidates import find_candidates
+from .dependencies import get_dependencies
+from .utils import filter_sources, identify_requirment
+
+
+class BasicProvider(resolvelib.AbstractProvider):
+    """Provider implementation to interface with `requirementslib.Requirement`.
+    """
+    def __init__(self, root_requirements, sources, allow_prereleases):
+        self.sources = sources
+        self.allow_prereleases = bool(allow_prereleases)
+        self.invalid_candidates = set()
+
+        # Remember the dependencies of each pinned candidate. The resolver
+        # calls `get_dependencies()` only when it wants to repin, so the
+        # dependencies fetched the last time it was called on a package are
+        # the set the resolver ended up using.
+        # We use this later to trace how a given dependency is specified by
+        # a package.
+        self.fetched_dependencies = {None: {
+            self.identify(r): r for r in root_requirements
+        }}
+        # TODO: Find a way to resolve with multiple versions (by tricking
+        # the runtime). Include multiple keys in pipfiles?
+        self.requires_pythons = {None: ""}   # TODO: Don't use any value
+
+    def identify(self, dependency):
+        return identify_requirment(dependency)
+
+    def get_preference(self, resolution, candidates, information):
+        # TODO: Provide better sorting logic. This simply resolves the ones
+        # with fewer choices first. Not sophisticated, but sounds reasonable?
+        return len(candidates)
+
+    def find_matches(self, requirement):
+        # TODO: Implement per-package prereleases flag. (pypa/pipenv#1696)
+        allow_prereleases = self.allow_prereleases
+        sources = filter_sources(requirement, self.sources)
+        candidates = find_candidates(requirement, sources, allow_prereleases)
+        return candidates
+
+    def is_satisfied_by(self, requirement, candidate):
+        # A non-named requirement has exactly one candidate, as implemented in
+        # `find_matches()`. It must match.
+        if not requirement.is_named:
+            return True
+
+        # Optimization: Everything matches if there are no specifiers.
+        if not requirement.specifiers:
+            return True
+
+        # We can't handle old version strings before PEP 440. Drop them all.
+        # Practically this shouldn't be a problem if the user is specifying a
+        # remotely reasonable dependency not from before 2013.
+        candidate_line = candidate.as_line()
+        if candidate_line in self.invalid_candidates:
+            return False
+        try:
+            version = candidate.get_specifier().version
+        except ValueError:
+            print('ignoring invalid version {}'.format(candidate_line))
+            self.invalid_candidates.add(candidate_line)
+            return False
+
+        return requirement.as_ireq().specifier.contains(version)
+
+    def get_dependencies(self, candidate):
+        sources = filter_sources(candidate, self.sources)
+        try:
+            dependencies, requires_python = get_dependencies(
+                candidate, sources=sources,
+            )
+        except Exception as e:
+            if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
+                raise
+            print("failed to get dependencies for {0!r}: {1}".format(
+                candidate.as_line(include_hashes=False), e,
+            ))
+            dependencies = []
+            requires_python = ""
+        candidate_key = self.identify(candidate)
+        self.fetched_dependencies[candidate_key] = {
+            self.identify(r): r for r in dependencies
+        }
+        self.requires_pythons[candidate_key] = requires_python
+        return dependencies
+
+
+class PinReuseProvider(BasicProvider):
+    """A provider that reuses preferred pins if possible.
+
+    This is used to implement "add", "remove", and "only-if-needed upgrade",
+    where already-pinned candidates in Pipfile.lock should be preferred.
+    """
+    def __init__(self, preferred_pins, *args, **kwargs):
+        super(PinReuseProvider, self).__init__(*args, **kwargs)
+        self.preferred_pins = preferred_pins
+
+    def find_matches(self, requirement):
+        candidates = super(PinReuseProvider, self).find_matches(requirement)
+        try:
+            # Add the preferred pin. Remember the resolver prefers candidates
+            # at the end of the list, so the most preferred should be last.
+            candidates.append(self.preferred_pins[self.identify(requirement)])
+        except KeyError:
+            pass
+        return candidates
+
+
+class EagerUpgradeProvider(PinReuseProvider):
+    """A specialized provider to handle an "eager" upgrade strategy.
+
+    An eager upgrade tries to upgrade not only the packages specified, but
+    also their dependencies (recursively). This contrasts with the
This contrasts to the "only-if-needed" + default, which only promises to upgrade the specified package, and + prevents touching anything else if at all possible. + + The provider is implemented as to keep track of all dependencies of the + specified packages to upgrade, and free their pins when it has a chance. + """ + def __init__(self, tracked_names, *args, **kwargs): + super(EagerUpgradeProvider, self).__init__(*args, **kwargs) + self.tracked_names = set(tracked_names) + for name in tracked_names: + self.preferred_pins.pop(name, None) + + # HACK: Set this special flag to distinguish preferred pins from + # regular, to tell the resolver to NOT use them for tracked packages. + for pin in self.preferred_pins.values(): + pin._preferred_by_provider = True + + def is_satisfied_by(self, requirement, candidate): + # If this is a tracking package, tell the resolver out of using the + # preferred pin, and into a "normal" candidate selection process. + if (self.identify(requirement) in self.tracked_names and + getattr(candidate, "_preferred_by_provider", False)): + return False + return super(EagerUpgradeProvider, self).is_satisfied_by( + requirement, candidate, + ) + + def get_dependencies(self, candidate): + # If this package is being tracked for upgrade, remove pins of its + # dependencies, and start tracking these new packages. + dependencies = super(EagerUpgradeProvider, self).get_dependencies( + candidate, + ) + if self.identify(candidate) in self.tracked_names: + for dependency in dependencies: + name = self.identify(dependency) + self.tracked_names.add(name) + self.preferred_pins.pop(name, None) + return dependencies + + def get_preference(self, resolution, candidates, information): + # Resolve tracking packages so we have a chance to unpin them first. + name = self.identify(candidates[0]) + if name in self.tracked_names: + return -1 + return len(candidates) diff --git a/pipenv/vendor/passa/reporters.py b/pipenv/vendor/passa/reporters.py new file mode 100644 index 0000000000..4fe6c0b810 --- /dev/null +++ b/pipenv/vendor/passa/reporters.py @@ -0,0 +1,90 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import resolvelib + +from .traces import trace_graph + + +def print_title(text): + print('\n{:=^84}\n'.format(text)) + + +def print_requirement(r, end='\n'): + print('{:>40}'.format(r.as_line(include_hashes=False)), end=end) + + +def print_dependency(state, key): + print_requirement(state.mapping[key], end='') + parents = sorted( + state.graph.iter_parents(key), + key=lambda n: (-1, '') if n is None else (ord(n[0].lower()), n), + ) + for i, p in enumerate(parents): + if p is None: + line = '(user)' + else: + line = state.mapping[p].as_line(include_hashes=False) + if i == 0: + padding = ' <= ' + else: + padding = ' ' * 44 + print('{pad}{line}'.format(pad=padding, line=line)) + + +class StdOutReporter(resolvelib.BaseReporter): + """Simple reporter that prints things to stdout. 
+ """ + def __init__(self, requirements): + super(StdOutReporter, self).__init__() + self.requirements = requirements + + def starting(self): + self._prev = None + print_title(' User requirements ') + for r in self.requirements: + print_requirement(r) + + def ending_round(self, index, state): + print_title(' Round {} '.format(index)) + mapping = state.mapping + if self._prev is None: + difference = set(mapping.keys()) + changed = set() + else: + difference = set(mapping.keys()) - set(self._prev.keys()) + changed = set( + k for k, v in mapping.items() + if k in self._prev and self._prev[k] != v + ) + self._prev = mapping + + if difference: + print('New pins: ') + for k in difference: + print_dependency(state, k) + print() + + if changed: + print('Changed pins:') + for k in changed: + print_dependency(state, k) + print() + + def ending(self, state): + print_title(" STABLE PINS ") + path_lists = trace_graph(state.graph) + for k in sorted(state.mapping): + print(state.mapping[k].as_line(include_hashes=False)) + paths = path_lists[k] + for path in paths: + if path == [None]: + print(' User requirement') + continue + print(' ', end='') + for v in reversed(path[1:]): + line = state.mapping[v].as_line(include_hashes=False) + print(' <=', line, end='') + print() + print() diff --git a/pipenv/vendor/passa/reporters/__init__.py b/pipenv/vendor/passa/reporters/__init__.py new file mode 100644 index 0000000000..dffe532788 --- /dev/null +++ b/pipenv/vendor/passa/reporters/__init__.py @@ -0,0 +1,31 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +from .base import BaseReporter + + +_REPORTER = BaseReporter() + + +def _get_stdout_reporter(): + from .stdout import Reporter + return Reporter() + + +def configure_reporter(name): + global _REPORTER + _REPORTER = { + None: BaseReporter, + "stdout": _get_stdout_reporter, + }[name]() + + +def get_reporter(): + return _REPORTER + + +def report(event, context=None): + if context is None: + context = {} + _REPORTER.report(event, context) diff --git a/pipenv/vendor/passa/reporters/base.py b/pipenv/vendor/passa/reporters/base.py new file mode 100644 index 0000000000..66a432cbc8 --- /dev/null +++ b/pipenv/vendor/passa/reporters/base.py @@ -0,0 +1,52 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import resolvelib + + +class ResolveLibReporter(resolvelib.BaseReporter): + """Implementation of a ResolveLib reporter that bridge messages. + """ + def __init__(self, parent): + super(ResolveLibReporter, self).__init__() + self.parent = parent + + def starting(self): + self.parent.report("resolvelib-starting", {"child": self}) + + def ending_round(self, index, state): + self.parent.report("resolvelib-ending-round", { + "child": self, "index": index, "state": state, + }) + + def ending(self, state): + self.parent.report("resolvelib-ending", { + "child": self, "state": state, + }) + + +class BaseReporter(object): + """Basic reporter that does nothing. + """ + def build_for_resolvelib(self): + """Build a reporter for ResolveLib. + """ + return ResolveLibReporter(self) + + def report(self, event, context): + """Report an event. + + The default behavior is to look for a "handle_EVENT" method on the + class to execute, or do nothing if there is no such method. + + :param event: A string to indicate the event. + :param context: A mapping containing appropriate data for the handling + function. 
+ """ + handler_name = "handle_{}".format(event.replace("-", "_")) + try: + handler = getattr(self, handler_name) + except AttributeError: + return + handler(context or {}) diff --git a/pipenv/vendor/passa/reporters/stdout.py b/pipenv/vendor/passa/reporters/stdout.py new file mode 100644 index 0000000000..2042337757 --- /dev/null +++ b/pipenv/vendor/passa/reporters/stdout.py @@ -0,0 +1,106 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +from resolvelib import NoVersionsAvailable, ResolutionImpossible + +from .base import BaseReporter + + +def _print_title(text): + print('\n{:=^84}\n'.format(text)) + + +def _print_requirement(r, end='\n'): + print('{:>40}'.format(r.as_line(include_hashes=False)), end=end) + + +def _print_dependency(state, key): + _print_requirement(state.mapping[key], end='') + parents = sorted( + state.graph.iter_parents(key), + key=lambda n: (-1, '') if n is None else (ord(n[0].lower()), n), + ) + for i, p in enumerate(parents): + if p is None: + line = '(user)' + else: + line = state.mapping[p].as_line(include_hashes=False) + if i == 0: + padding = ' <= ' + else: + padding = ' ' * 44 + print('{pad}{line}'.format(pad=padding, line=line)) + + +class Reporter(BaseReporter): + """A reporter implementation that prints messages to stdout. + """ + def handle_resolvelib_starting(self, context): + context["child"]._prev_mapping = None + + def handle_resolvelib_ending_round(self, context): + _print_title(' Round {} '.format(context["index"])) + mapping = context["state"].mapping + if context["child"]._prev_mapping is None: + difference = set(mapping.keys()) + changed = set() + else: + prev = context["child"]._prev_mapping + difference = set(mapping.keys()) - set(prev.keys()) + changed = set( + k for k, v in mapping.items() + if k in prev and prev[k] != v + ) + context["child"]._prev_mapping = mapping + + if difference: + print('New pins: ') + for k in difference: + _print_dependency(context["state"], k) + print() + + if changed: + print('Changed pins:') + for k in changed: + _print_dependency(context["state"], k) + print() + + def handle_lock_starting(self, context): + _print_title(' User requirements ') + for r in context["requirements"]: + _print_requirement(r) + + def handle_lock_trace_ended(self, context): + _print_title(" STABLE PINS ") + mapping = context["state"].mapping + for k in sorted(mapping): + print(mapping[k].as_line(include_hashes=False)) + paths = context["traces"][k] + for path in paths: + if path == [None]: + print(' User requirement') + continue + print(' ', end='') + for v in reversed(path[1:]): + line = mapping[v].as_line(include_hashes=False) + print(' <=', line, end='') + print() + print() + + def handle_lock_failed(self, context): + e = context["exception"] + if isinstance(e, ResolutionImpossible): + print("\nCANNOT RESOLVE.\nOFFENDING REQUIREMENTS:") + for r in e.requirements: + _print_requirement(r) + elif isinstance(e, NoVersionsAvailable): + print("\nCANNOT RESOLVE. 
NO CANDIDATES FOUND FOR:")
+            print("{:>40}".format(e.requirement.as_line(include_hashes=False)))
+            if e.parent:
+                line = e.parent.as_line(include_hashes=False)
+                print("{:>41}".format("(from {})".format(line)))
+            else:
+                print("{:>41}".format("(user)"))
+        else:
+            raise
diff --git a/pipenv/vendor/passa/synchronizers.py b/pipenv/vendor/passa/synchronizers.py
new file mode 100644
index 0000000000..30fc4492aa
--- /dev/null
+++ b/pipenv/vendor/passa/synchronizers.py
@@ -0,0 +1,211 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import contextlib
+import os
+import sys
+import sysconfig
+
+import pkg_resources
+
+import packaging.markers
+import packaging.version
+import requirementslib
+
+from ._pip import uninstall_requirement, EditableInstaller, WheelInstaller
+
+
+def _is_installation_local(name):
+    """Check whether the distribution is in the current Python installation.
+
+    This is used to distinguish packages seen by a virtual environment. A venv
+    may be able to see global packages, but we don't want to mess with them.
+    """
+    location = pkg_resources.working_set.by_key[name].location
+    return os.path.commonprefix([location, sys.prefix]) == sys.prefix
+
+
+def _is_up_to_date(distro, version):
+    # This is done in strings to avoid type mismatches caused by vendoring.
+    return str(version) == str(packaging.version.parse(distro.version))
+
+
+GroupCollection = collections.namedtuple("GroupCollection", [
+    "uptodate", "outdated", "noremove", "unneeded",
+])
+
+
+def _group_installed_names(packages):
+    """Group locally installed packages based on given specifications.
+
+    `packages` is a name-package mapping that is used as the baseline to
+    determine how the installed packages should be grouped.
+
+    Returns a `GroupCollection` of disjoint sets, all containing names of
+    installed packages (the `noremove` group is not populated here):
+
+    * `uptodate`: These match the specifications.
+    * `outdated`: These installations are specified, but don't match the
+      specifications in `packages`.
+    * `unneeded`: These are installed, but not specified in `packages`.
+    """
+    groupcoll = GroupCollection(set(), set(), set(), set())
+
+    for distro in pkg_resources.working_set:
+        name = distro.key
+        try:
+            package = packages[name]
+        except KeyError:
+            groupcoll.unneeded.add(name)
+            continue
+
+        r = requirementslib.Requirement.from_pipfile(name, package)
+        if not r.is_named:
+            # Always mark non-named. I think pip does something similar?
+            groupcoll.outdated.add(name)
+        elif not _is_up_to_date(distro, r.get_version()):
+            groupcoll.outdated.add(name)
+        else:
+            groupcoll.uptodate.add(name)
+
+    return groupcoll
+
+
+@contextlib.contextmanager
+def _remove_package(name):
+    if name is None or not _is_installation_local(name):
+        yield
+        return
+    r = requirementslib.Requirement.from_line(name)
+    with uninstall_requirement(r.as_ireq(), auto_confirm=True, verbose=False):
+        yield
+
+
+def _get_packages(lockfile, default, develop):
+    # Don't need to worry about duplicates because only extras can differ.
+    # Extras don't matter because they only affect dependencies, and we
+    # don't install dependencies anyway!
+    packages = {}
+    if default:
+        packages.update(lockfile.default._data)
+    if develop:
+        packages.update(lockfile.develop._data)
+    return packages
+
+
+def _build_paths():
+    """Prepare paths for distlib.wheel.Wheel to install into.
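+
+    The result is a mapping shaped like (values are illustrative)::
+
+        {"prefix": "/usr", "data": "/usr", "scripts": "/usr/bin",
+         "headers": "/usr/include/...", "purelib": ".../site-packages",
+         "platlib": ".../site-packages"}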
+ """ + paths = sysconfig.get_paths() + return { + "prefix": sys.prefix, + "data": paths["data"], + "scripts": paths["scripts"], + "headers": paths["include"], + "purelib": paths["purelib"], + "platlib": paths["platlib"], + } + + +PROTECTED_FROM_CLEAN = {"setuptools", "pip"} + + +def _clean(names): + for name in names: + if name in PROTECTED_FROM_CLEAN: + continue + with _remove_package(name): + pass + + +class Synchronizer(object): + """Helper class to install packages from a project's lock file. + """ + def __init__(self, project, default, develop, clean_unneeded): + self._root = project.root # Only for repr. + self.packages = _get_packages(project.lockfile, default, develop) + self.sources = project.lockfile.meta.sources._data + self.paths = _build_paths() + self.clean_unneeded = clean_unneeded + + def __repr__(self): + return "<{0} @ {1!r}>".format(type(self).__name__, self._root) + + def sync(self): + groupcoll = _group_installed_names(self.packages) + + installed = set() + updated = set() + cleaned = set() + + # TODO: Show a prompt to confirm cleaning. We will need to implement a + # reporter pattern for this as well. + if self.clean_unneeded: + cleaned.update(groupcoll.unneeded) + _clean(cleaned) + + # TODO: Specify installation order? (pypa/pipenv#2274) + installers = [] + for name, package in self.packages.items(): + r = requirementslib.Requirement.from_pipfile(name, package) + name = r.normalized_name + if name in groupcoll.uptodate: + continue + markers = r.markers + if markers and not packaging.markers.Marker(markers).evaluate(): + continue + r.markers = None + if r.editable: + installer = EditableInstaller(r) + else: + installer = WheelInstaller(r, self.sources, self.paths) + try: + installer.prepare() + except Exception as e: + if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): + raise + print("failed to prepare {0!r}: {1}".format( + r.as_line(include_hashes=False), e, + )) + else: + installers.append((name, installer)) + + for name, installer in installers: + if name in groupcoll.outdated: + name_to_remove = name + else: + name_to_remove = None + try: + with _remove_package(name_to_remove): + installer.install() + except Exception as e: + if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): + raise + print("failed to install {0!r}: {1}".format( + r.as_line(include_hashes=False), e, + )) + continue + if name in groupcoll.outdated or name in groupcoll.noremove: + updated.add(name) + else: + installed.add(name) + + return installed, updated, cleaned + + +class Cleaner(object): + """Helper class to clean packages not in a project's lock file. + """ + def __init__(self, project, default, develop): + self._root = project.root # Only for repr. 
+        self.packages = _get_packages(project.lockfile, default, develop)
+
+    def __repr__(self):
+        return "<{0} @ {1!r}>".format(type(self).__name__, self._root)
+
+    def clean(self):
+        groupcoll = _group_installed_names(self.packages)
+        _clean(groupcoll.unneeded)
+        return groupcoll.unneeded
diff --git a/pipenv/vendor/passa/traces.py b/pipenv/vendor/passa/traces.py
new file mode 100644
index 0000000000..9715db975b
--- /dev/null
+++ b/pipenv/vendor/passa/traces.py
@@ -0,0 +1,40 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+
+def _trace_visit_vertex(graph, current, target, visited, path, paths):
+    if current == target:
+        paths.append(path)
+        return
+    for v in graph.iter_children(current):
+        if v == current or v in visited:
+            continue
+        next_path = path + [current]
+        next_visited = visited | {current}
+        _trace_visit_vertex(graph, v, target, next_visited, next_path, paths)
+
+
+def trace_graph(graph):
+    """Build a collection of "traces" for each package.
+
+    A trace is a list of names that eventually leads to the package. For
+    example, if A and B are root dependencies, A depends on C and D, B
+    depends on C, and C depends on D, the return value would be like::
+
+        {
+            None: [],
+            "A": [[None]],
+            "B": [[None]],
+            "C": [[None, "A"], [None, "B"]],
+            "D": [[None, "A"], [None, "A", "C"], [None, "B", "C"]],
+        }
+    """
+    result = {None: []}
+    for vertex in graph:
+        result[vertex] = []
+        for root in graph.iter_children(None):
+            paths = []
+            _trace_visit_vertex(graph, root, vertex, {None}, [None], paths)
+            result[vertex].extend(paths)
+    return result
diff --git a/pipenv/vendor/passa/utils.py b/pipenv/vendor/passa/utils.py
new file mode 100644
index 0000000000..1028db10ac
--- /dev/null
+++ b/pipenv/vendor/passa/utils.py
@@ -0,0 +1,97 @@
+# -*- coding=utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals
+
+
+def identify_requirment(r):
+    """Produce an identifier for a requirement to use in the resolver.
+
+    Note that we are treating the same package with different extras as
+    distinct. This allows semantics like "I only want this extra in
+    development, not production".
+
+    This also makes the resolver's implementation much simpler, with the
+    minor cost of possibly needing a few extra resolution steps if we happen
+    to have the same package appearing multiple times.
+    """
+    return "{0}{1}".format(r.normalized_name, r.extras_as_pip)
+
+
+def get_pinned_version(ireq):
+    """Get the pinned version of an InstallRequirement.
+
+    An InstallRequirement is considered pinned if:
+
+    - Is not editable
+    - It has exactly one specifier
+    - That specifier is "=="
+    - The version does not contain a wildcard
+
+    Examples:
+        django==1.8   # pinned
+        django>1.8    # NOT pinned
+        django~=1.8   # NOT pinned
+        django==1.*   # NOT pinned
+
+    Raises `TypeError` if the input is not a valid InstallRequirement, or
+    `ValueError` if the InstallRequirement is not pinned.
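+
+    e.g. an InstallRequirement parsed from "django==1.8" yields the string
+    "1.8" (illustrative).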
+ """ + try: + specifier = ireq.specifier + except AttributeError: + raise TypeError("Expected InstallRequirement, not {}".format( + type(ireq).__name__, + )) + + if ireq.editable: + raise ValueError("InstallRequirement is editable") + if not specifier: + raise ValueError("InstallRequirement has no version specification") + if len(specifier._specs) != 1: + raise ValueError("InstallRequirement has multiple specifications") + + op, version = next(iter(specifier._specs))._spec + if op not in ('==', '===') or version.endswith('.*'): + raise ValueError("InstallRequirement not pinned (is {0!r})".format( + op + version, + )) + + return version + + +def is_pinned(ireq): + """Returns whether an InstallRequirement is a "pinned" requirement. + + An InstallRequirement is considered pinned if: + + - Is not editable + - It has exactly one specifier + - That specifier is "==" + - The version does not contain a wildcard + + Examples: + django==1.8 # pinned + django>1.8 # NOT pinned + django~=1.8 # NOT pinned + django==1.* # NOT pinned + """ + try: + get_pinned_version(ireq) + except (TypeError, ValueError): + return False + return True + + +def filter_sources(requirement, sources): + """Return a filtered list of sources for this requirement. + + This considers the index specified by the requirement, and returns only + matching source entries if there is at least one. + """ + if not sources or not requirement.index: + return sources + filtered_sources = [ + source for source in sources + if source.get("name") == requirement.index + ] + return filtered_sources or sources diff --git a/pipenv/vendor/passa/vcs.py b/pipenv/vendor/passa/vcs.py new file mode 100644 index 0000000000..23d033d3bb --- /dev/null +++ b/pipenv/vendor/passa/vcs.py @@ -0,0 +1,37 @@ +import os + +from pip_shims import VcsSupport + +from .utils import cheesy_temporary_directory, mkdir_p + + +def _obtrain_ref(vcs_obj, src_dir, name, rev=None): + target_dir = os.path.join(src_dir, name) + target_rev = vcs_obj.make_rev_options(rev) + if not os.path.exists(target_dir): + vcs_obj.obtain(target_dir) + if (not vcs_obj.is_commit_id_equal(target_dir, rev) and + not vcs_obj.is_commit_id_equal(target_dir, target_rev)): + vcs_obj.update(target_dir, target_rev) + return vcs_obj.get_revision(target_dir) + + +def _get_src(): + src = os.environ.get("PIP_SRC") + if src: + return src + virtual_env = os.environ.get("VIRTUAL_ENV") + if virtual_env: + return os.path.join(virtual_env, "src") + temp_src = cheesy_temporary_directory(prefix='passa-src') + return temp_src + + +def set_ref(requirement): + backend = VcsSupport()._registry.get(requirement.vcs) + vcs = backend(url=requirement.req.vcs_uri) + src = _get_src() + mkdir_p(src, mode=0o775) + name = requirement.normalized_name + ref = _obtrain_ref(vcs, src, name, rev=requirement.req.ref) + requirement.req.ref = ref diff --git a/pipenv/vendor/yaspin/__init__.py b/pipenv/vendor/yaspin/__init__.py new file mode 100644 index 0000000000..57853a1389 --- /dev/null +++ b/pipenv/vendor/yaspin/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- + +from __future__ import absolute_import + +from .__version__ import __version__ # noqa +from .api import kbi_safe_yaspin, yaspin +from .base_spinner import Spinner + + +__all__ = ("yaspin", "kbi_safe_yaspin", "Spinner") diff --git a/pipenv/vendor/yaspin/__version__.py b/pipenv/vendor/yaspin/__version__.py new file mode 100644 index 0000000000..9e78220f94 --- /dev/null +++ b/pipenv/vendor/yaspin/__version__.py @@ -0,0 +1 @@ +__version__ = "0.14.0" diff --git 
a/pipenv/vendor/yaspin/api.py b/pipenv/vendor/yaspin/api.py new file mode 100644 index 0000000000..156630db2e --- /dev/null +++ b/pipenv/vendor/yaspin/api.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.api +~~~~~~~~~~ + +This module implements the Yaspin API. + +:copyright: (c) 2018 by Pavlo Dmytrenko. +:license: MIT, see LICENSE for more details. +""" + +import signal + +from .core import Yaspin +from .signal_handlers import default_handler + + +def yaspin(*args, **kwargs): + """Display spinner in stdout. + + Can be used as a context manager or as a function decorator. + + Arguments: + spinner (base_spinner.Spinner, optional): Spinner object to use. + text (str, optional): Text to show along with spinner. + color (str, optional): Spinner color. + on_color (str, optional): Color highlight for the spinner. + attrs (list, optional): Color attributes for the spinner. + reversal (bool, optional): Reverse spin direction. + side (str, optional): Place spinner to the right or left end + of the text string. + sigmap (dict, optional): Maps POSIX signals to their respective + handlers. + + Returns: + core.Yaspin: instance of the Yaspin class. + + Raises: + ValueError: If unsupported ``color`` is specified. + ValueError: If unsupported ``on_color`` is specified. + ValueError: If unsupported color attribute in ``attrs`` + is specified. + ValueError: If trying to register handler for SIGKILL signal. + ValueError: If unsupported ``side`` is specified. + + Available text colors: + red, green, yellow, blue, magenta, cyan, white. + + Available text highlights: + on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, + on_white, on_grey. + + Available attributes: + bold, dark, underline, blink, reverse, concealed. + + Example:: + + # Use as a context manager + with yaspin(): + some_operations() + + # Context manager with text + with yaspin(text="Processing..."): + some_operations() + + # Context manager with custom sequence + with yaspin(Spinner('-\\|/', 150)): + some_operations() + + # As decorator + @yaspin(text="Loading...") + def foo(): + time.sleep(5) + + foo() + + """ + return Yaspin(*args, **kwargs) + + +def kbi_safe_yaspin(*args, **kwargs): + kwargs["sigmap"] = {signal.SIGINT: default_handler} + return Yaspin(*args, **kwargs) + + +_kbi_safe_doc = yaspin.__doc__.replace("yaspin", "kbi_safe_yaspin") +kbi_safe_yaspin.__doc__ = _kbi_safe_doc diff --git a/pipenv/vendor/yaspin/base_spinner.py b/pipenv/vendor/yaspin/base_spinner.py new file mode 100644 index 0000000000..537ff79985 --- /dev/null +++ b/pipenv/vendor/yaspin/base_spinner.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.base_spinner +~~~~~~~~~~~~~~~~~~~ + +Spinner class, used to construct other spinners. +""" + +from __future__ import absolute_import + +from collections import namedtuple + + +Spinner = namedtuple("Spinner", "frames interval") +default_spinner = Spinner("⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏", 80) diff --git a/pipenv/vendor/yaspin/compat.py b/pipenv/vendor/yaspin/compat.py new file mode 100644 index 0000000000..744de5a1eb --- /dev/null +++ b/pipenv/vendor/yaspin/compat.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.compat +~~~~~~~~~~~~~ + +Compatibility layer. 
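+
+Exposes ``str``/``bytes``/``basestring`` aliases and an ``iteritems``
+helper so the rest of the package can target Py2 and Py3 with a single
+code path.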
+""" + +import sys + + +PY2 = sys.version_info[0] == 2 + + +if PY2: + builtin_str = str + bytes = str + str = unicode # noqa + basestring = basestring # noqa + + def iteritems(dct): + return dct.iteritems() + + +else: + builtin_str = str + bytes = bytes + str = str + basestring = (str, bytes) + + def iteritems(dct): + return dct.items() diff --git a/pipenv/vendor/yaspin/constants.py b/pipenv/vendor/yaspin/constants.py new file mode 100644 index 0000000000..b26baabe8f --- /dev/null +++ b/pipenv/vendor/yaspin/constants.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.constants +~~~~~~~~~~~~~~~~ + +Some setups. +""" + + +ENCODING = "utf-8" +COLOR_MAP = { + # name: type + "blink": "attrs", + "bold": "attrs", + "concealed": "attrs", + "dark": "attrs", + "reverse": "attrs", + "underline": "attrs", + "blue": "color", + "cyan": "color", + "green": "color", + "magenta": "color", + "red": "color", + "white": "color", + "yellow": "color", + "on_blue": "on_color", + "on_cyan": "on_color", + "on_green": "on_color", + "on_grey": "on_color", + "on_magenta": "on_color", + "on_red": "on_color", + "on_white": "on_color", + "on_yellow": "on_color", +} +COLOR_ATTRS = COLOR_MAP.keys() + +# Get spinner names: +# $ < yaspin/data/spinners.json | jq '. | keys' +SPINNER_ATTRS = [ + "arc", + "arrow", + "arrow2", + "arrow3", + "balloon", + "balloon2", + "bounce", + "bouncingBall", + "bouncingBar", + "boxBounce", + "boxBounce2", + "christmas", + "circle", + "circleHalves", + "circleQuarters", + "clock", + "dots", + "dots10", + "dots11", + "dots12", + "dots2", + "dots3", + "dots4", + "dots5", + "dots6", + "dots7", + "dots8", + "dots9", + "dqpb", + "earth", + "flip", + "grenade", + "growHorizontal", + "growVertical", + "hamburger", + "hearts", + "layer", + "line", + "line2", + "monkey", + "moon", + "noise", + "pipe", + "point", + "pong", + "runner", + "shark", + "simpleDots", + "simpleDotsScrolling", + "smiley", + "squareCorners", + "squish", + "star", + "star2", + "toggle", + "toggle10", + "toggle11", + "toggle12", + "toggle13", + "toggle2", + "toggle3", + "toggle4", + "toggle5", + "toggle6", + "toggle7", + "toggle8", + "toggle9", + "triangle", + "weather", +] diff --git a/pipenv/vendor/yaspin/core.py b/pipenv/vendor/yaspin/core.py new file mode 100644 index 0000000000..d01fb98ef1 --- /dev/null +++ b/pipenv/vendor/yaspin/core.py @@ -0,0 +1,534 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.yaspin +~~~~~~~~~~~~~ + +A lightweight terminal spinner. +""" + +from __future__ import absolute_import + +import functools +import itertools +import signal +import sys +import threading +import time + +from .base_spinner import default_spinner +from .compat import PY2, basestring, builtin_str, bytes, iteritems, str +from .constants import COLOR_ATTRS, COLOR_MAP, ENCODING, SPINNER_ATTRS +from .helpers import to_unicode +from .termcolor import colored + + +class Yaspin(object): + """Implements a context manager that spawns a thread + to write spinner frames into a tty (stdout) during + context execution. + """ + + # When Python finds its output attached to a terminal, + # it sets the sys.stdout.encoding attribute to the terminal's encoding. + # The print statement's handler will automatically encode unicode + # arguments into bytes. + # + # In Py2 when piping or redirecting output, Python does not detect + # the desired character set of the output, it sets sys.stdout.encoding + # to None, and print will invoke the default "ascii" codec. + # + # Py3 invokes "UTF-8" codec by default. 
+    #
+    # That's why in Py2, output should be encoded manually with the desired
+    # encoding in order to support pipes and redirects.
+
+    def __init__(
+        self,
+        spinner=None,
+        text="",
+        color=None,
+        on_color=None,
+        attrs=None,
+        reversal=False,
+        side="left",
+        sigmap=None,
+    ):
+        # Spinner
+        self._spinner = self._set_spinner(spinner)
+        self._frames = self._set_frames(self._spinner, reversal)
+        self._interval = self._set_interval(self._spinner)
+        self._cycle = self._set_cycle(self._frames)
+
+        # Color Specification
+        self._color = self._set_color(color) if color else color
+        self._on_color = self._set_on_color(on_color) if on_color else on_color
+        self._attrs = self._set_attrs(attrs) if attrs else set()
+        self._color_func = self._compose_color_func()
+
+        # Other
+        self._text = self._set_text(text)
+        self._side = self._set_side(side)
+        self._reversal = reversal
+
+        # Helper flags
+        self._stop_spin = None
+        self._hide_spin = None
+        self._spin_thread = None
+        self._last_frame = None
+
+        # Signals
+
+        # In Python 2 signal.SIG* are of type int.
+        # In Python 3 signal.SIG* are enums.
+        #
+        # Signal = Union[enum.Enum, int]
+        # SigHandler = Union[enum.Enum, Callable]
+        self._sigmap = sigmap if sigmap else {}  # Dict[Signal, SigHandler]
+        # Maps signals to their default handlers in order to reset
+        # custom handlers set by ``sigmap`` at the cleanup phase.
+        self._dfl_sigmap = {}  # Dict[Signal, SigHandler]
+
+    #
+    # Dunders
+    #
+    def __repr__(self):
+        repr_ = u"<Yaspin frames={0!s}>".format(self._frames)
+        if PY2:
+            return repr_.encode(ENCODING)
+        return repr_
+
+    def __enter__(self):
+        self.start()
+        return self
+
+    def __exit__(self, exc_type, exc_val, traceback):
+        # Avoid stop() execution for the 2nd time
+        if self._spin_thread.is_alive():
+            self.stop()
+        return False  # nothing is handled
+
+    def __call__(self, fn):
+        @functools.wraps(fn)
+        def inner(*args, **kwargs):
+            with self:
+                return fn(*args, **kwargs)
+
+        return inner
+
+    def __getattr__(self, name):
+        # CLI spinners
+        if name in SPINNER_ATTRS:
+            from .spinners import Spinners
+
+            sp = getattr(Spinners, name)
+            self.spinner = sp
+        # Color Attributes: "color", "on_color", "attrs"
+        elif name in COLOR_ATTRS:
+            attr_type = COLOR_MAP[name]
+            # Call appropriate property setters;
+            # _color_func is updated automatically by setters.
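+            # (e.g. accessing `sp.red` or `sp.bold` on a Yaspin instance
+            # routes through here and updates the composed color function.)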
+ if attr_type == "attrs": + self.attrs = [name] # calls property setter + if attr_type in ("color", "on_color"): + setattr(self, attr_type, name) # calls property setter + # Side: "left" or "right" + elif name in ("left", "right"): + self.side = name # calls property setter + # Common error for unsupported attributes + else: + raise AttributeError( + "'{0}' object has no attribute: '{1}'".format( + self.__class__.__name__, name + ) + ) + return self + + # + # Properties + # + @property + def spinner(self): + return self._spinner + + @spinner.setter + def spinner(self, sp): + self._spinner = self._set_spinner(sp) + self._frames = self._set_frames(self._spinner, self._reversal) + self._interval = self._set_interval(self._spinner) + self._cycle = self._set_cycle(self._frames) + + @property + def text(self): + return self._text + + @text.setter + def text(self, txt): + self._text = self._set_text(txt) + + @property + def color(self): + return self._color + + @color.setter + def color(self, value): + self._color = self._set_color(value) if value else value + self._color_func = self._compose_color_func() # update + + @property + def on_color(self): + return self._on_color + + @on_color.setter + def on_color(self, value): + self._on_color = self._set_on_color(value) if value else value + self._color_func = self._compose_color_func() # update + + @property + def attrs(self): + return list(self._attrs) + + @attrs.setter + def attrs(self, value): + new_attrs = self._set_attrs(value) if value else set() + self._attrs = self._attrs.union(new_attrs) + self._color_func = self._compose_color_func() # update + + @property + def side(self): + return self._side + + @side.setter + def side(self, value): + self._side = self._set_side(value) + + @property + def reversal(self): + return self._reversal + + @reversal.setter + def reversal(self, value): + self._reversal = value + self._frames = self._set_frames(self._spinner, self._reversal) + self._cycle = self._set_cycle(self._frames) + + # + # Public + # + def start(self): + if self._sigmap: + self._register_signal_handlers() + + if sys.stdout.isatty(): + self._hide_cursor() + + self._stop_spin = threading.Event() + self._hide_spin = threading.Event() + self._spin_thread = threading.Thread(target=self._spin) + self._spin_thread.start() + + def stop(self): + if self._dfl_sigmap: + # Reset registered signal handlers to default ones + self._reset_signal_handlers() + + if self._spin_thread: + self._stop_spin.set() + self._spin_thread.join() + + sys.stdout.write("\r") + self._clear_line() + + if sys.stdout.isatty(): + self._show_cursor() + + def hide(self): + """Hide the spinner to allow for custom writing to the terminal.""" + thr_is_alive = self._spin_thread and self._spin_thread.is_alive() + + if thr_is_alive and not self._hide_spin.is_set(): + # set the hidden spinner flag + self._hide_spin.set() + + # clear the current line + sys.stdout.write("\r") + self._clear_line() + + # flush the stdout buffer so the current line can be rewritten to + sys.stdout.flush() + + def show(self): + """Show the hidden spinner.""" + thr_is_alive = self._spin_thread and self._spin_thread.is_alive() + + if thr_is_alive and self._hide_spin.is_set(): + # clear the hidden spinner flag + self._hide_spin.clear() + + # clear the current line so the spinner is not appended to it + sys.stdout.write("\r") + self._clear_line() + + def write(self, text): + """Write text in the terminal without breaking the spinner.""" + # similar to tqdm.write() + # 
https://pypi.python.org/pypi/tqdm#writing-messages
+        sys.stdout.write("\r")
+        self._clear_line()
+
+        _text = to_unicode(text)
+        if PY2:
+            _text = _text.encode(ENCODING)
+
+        # Ensure output is bytes for Py2 and Unicode for Py3
+        assert isinstance(_text, builtin_str)
+
+        sys.stdout.write("{0}\n".format(_text))
+
+    def ok(self, text="OK"):
+        """Set Ok (success) finalizer to a spinner."""
+        _text = text if text else "OK"
+        self._freeze(_text)
+
+    def fail(self, text="FAIL"):
+        """Set fail finalizer to a spinner."""
+        _text = text if text else "FAIL"
+        self._freeze(_text)
+
+    #
+    # Protected
+    #
+    def _freeze(self, final_text):
+        """Stop spinner, compose last frame and 'freeze' it."""
+        text = to_unicode(final_text)
+        self._last_frame = self._compose_out(text, mode="last")
+
+        # Should be stopped here, otherwise prints after
+        # self._freeze call will mess up the spinner
+        self.stop()
+        sys.stdout.write(self._last_frame)
+
+    def _spin(self):
+        while not self._stop_spin.is_set():
+
+            if self._hide_spin.is_set():
+                # Wait a bit to avoid wasting cycles
+                time.sleep(self._interval)
+                continue
+
+            # Compose output
+            spin_phase = next(self._cycle)
+            out = self._compose_out(spin_phase)
+
+            # Write
+            sys.stdout.write(out)
+            self._clear_line()
+            sys.stdout.flush()
+
+            # Wait
+            time.sleep(self._interval)
+            sys.stdout.write("\b")
+
+    def _compose_color_func(self):
+        fn = functools.partial(
+            colored,
+            color=self._color,
+            on_color=self._on_color,
+            attrs=list(self._attrs),
+        )
+        return fn
+
+    def _compose_out(self, frame, mode=None):
+        # Ensure Unicode input
+        assert isinstance(frame, str)
+        assert isinstance(self._text, str)
+
+        frame = frame.encode(ENCODING) if PY2 else frame
+        text = self._text.encode(ENCODING) if PY2 else self._text
+
+        # Colors
+        if self._color_func is not None:
+            frame = self._color_func(frame)
+
+        # Position
+        if self._side == "right":
+            frame, text = text, frame
+
+        # Mode
+        if not mode:
+            out = "\r{0} {1}".format(frame, text)
+        else:
+            out = "{0} {1}\n".format(frame, text)
+
+        # Ensure output is bytes for Py2 and Unicode for Py3
+        assert isinstance(out, builtin_str)
+
+        return out
+
+    def _register_signal_handlers(self):
+        # SIGKILL cannot be caught or ignored, and the receiving
+        # process cannot perform any clean-up upon receiving this
+        # signal.
+        if signal.SIGKILL in self._sigmap.keys():
+            raise ValueError(
+                "Trying to set handler for SIGKILL signal. "
+                "SIGKILL cannot be caught or ignored in POSIX systems."
+            )
+
+        for sig, sig_handler in iteritems(self._sigmap):
+            # A handler for a particular signal, once set, remains
+            # installed until it is explicitly reset. Store default
+            # signal handlers for subsequent reset at cleanup phase.
+            dfl_handler = signal.getsignal(sig)
+            self._dfl_sigmap[sig] = dfl_handler
+
+            # ``signal.SIG_DFL`` and ``signal.SIG_IGN`` are also valid
+            # signal handlers and are not callables.
+            if callable(sig_handler):
+                # ``signal.signal`` accepts handler function which is
+                # called with two arguments: signal number and the
+                # interrupted stack frame. ``functools.partial`` solves
+                # the problem of passing spinner instance into the handler
+                # function.
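+                # (The partial pre-binds ``spinner=self``, so the result
+                # still matches signal's ``(signum, frame)`` call signature.)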
+ sig_handler = functools.partial(sig_handler, spinner=self)
+
+ signal.signal(sig, sig_handler)
+
+ def _reset_signal_handlers(self):
+ for sig, sig_handler in iteritems(self._dfl_sigmap):
+ signal.signal(sig, sig_handler)
+
+ #
+ # Static
+ #
+ @staticmethod
+ def _set_color(value):
+ # type: (str) -> str
+ available_values = [k for k, v in iteritems(COLOR_MAP) if v == "color"]
+
+ if value not in available_values:
+ raise ValueError(
+ "'{0}': unsupported color value. Use one of the: {1}".format(
+ value, ", ".join(available_values)
+ )
+ )
+ return value
+
+ @staticmethod
+ def _set_on_color(value):
+ # type: (str) -> str
+ available_values = [
+ k for k, v in iteritems(COLOR_MAP) if v == "on_color"
+ ]
+ if value not in available_values:
+ raise ValueError(
+ "'{0}': unsupported on_color value. "
+ "Use one of the: {1}".format(
+ value, ", ".join(available_values)
+ )
+ )
+ return value
+
+ @staticmethod
+ def _set_attrs(attrs):
+ # type: (List[str]) -> Set[str]
+ available_values = [k for k, v in iteritems(COLOR_MAP) if v == "attrs"]
+
+ for attr in attrs:
+ if attr not in available_values:
+ raise ValueError(
+ "'{0}': unsupported attribute value. "
+ "Use one of the: {1}".format(
+ attr, ", ".join(available_values)
+ )
+ )
+ return set(attrs)
+
+ @staticmethod
+ def _set_spinner(spinner):
+ if not spinner:
+ sp = default_spinner
+
+ if hasattr(spinner, "frames") and hasattr(spinner, "interval"):
+ if not spinner.frames or not spinner.interval:
+ sp = default_spinner
+ else:
+ sp = spinner
+ else:
+ sp = default_spinner
+
+ return sp
+
+ @staticmethod
+ def _set_side(side):
+ # type: (str) -> str
+ if side not in ("left", "right"):
+ raise ValueError(
+ "'{0}': unsupported side value. "
+ "Use either 'left' or 'right'.".format(side)
+ )
+ return side
+
+ @staticmethod
+ def _set_frames(spinner, reversal):
+ # type: (base_spinner.Spinner, bool) -> Union[str, List]
+ uframes = None # unicode frames
+ uframes_seq = None # sequence of unicode frames
+
+ if isinstance(spinner.frames, basestring):
+ uframes = to_unicode(spinner.frames) if PY2 else spinner.frames
+
+ # TODO (pavdmyt): support any type that implements iterable
+ if isinstance(spinner.frames, (list, tuple)):
+
+ # Empty ``spinner.frames`` is handled by ``Yaspin._set_spinner``
+ if spinner.frames and isinstance(spinner.frames[0], bytes):
+ uframes_seq = [to_unicode(frame) for frame in spinner.frames]
+ else:
+ uframes_seq = spinner.frames
+
+ _frames = uframes or uframes_seq
+ if not _frames:
+ # Empty ``spinner.frames`` is handled by ``Yaspin._set_spinner``.
+ # This code is very unlikely to be executed. However, it's still
+ # here to be on a safe side.
+ raise ValueError( + "{0!r}: no frames found in spinner".format(spinner) + ) + + # Builtin ``reversed`` returns reverse iterator, + # which adds unnecessary difficulty for returning + # unicode value; + # Hence using [::-1] syntax + frames = _frames[::-1] if reversal else _frames + + return frames + + @staticmethod + def _set_interval(spinner): + # Milliseconds to Seconds + return spinner.interval * 0.001 + + @staticmethod + def _set_cycle(frames): + return itertools.cycle(frames) + + @staticmethod + def _set_text(text): + if PY2: + return to_unicode(text) + return text + + @staticmethod + def _hide_cursor(): + sys.stdout.write("\033[?25l") + sys.stdout.flush() + + @staticmethod + def _show_cursor(): + sys.stdout.write("\033[?25h") + sys.stdout.flush() + + @staticmethod + def _clear_line(): + sys.stdout.write("\033[K") diff --git a/pipenv/vendor/yaspin/data/spinners.json b/pipenv/vendor/yaspin/data/spinners.json new file mode 100644 index 0000000000..b388b2a581 --- /dev/null +++ b/pipenv/vendor/yaspin/data/spinners.json @@ -0,0 +1,912 @@ +{ + "dots": { + "interval": 80, + "frames": [ + "⠋", + "⠙", + "⠹", + "⠸", + "⠼", + "⠴", + "⠦", + "⠧", + "⠇", + "⠏" + ] + }, + "dots2": { + "interval": 80, + "frames": [ + "⣾", + "⣽", + "⣻", + "⢿", + "⡿", + "⣟", + "⣯", + "⣷" + ] + }, + "dots3": { + "interval": 80, + "frames": [ + "⠋", + "⠙", + "⠚", + "⠞", + "⠖", + "⠦", + "⠴", + "⠲", + "⠳", + "⠓" + ] + }, + "dots4": { + "interval": 80, + "frames": [ + "⠄", + "⠆", + "⠇", + "⠋", + "⠙", + "⠸", + "⠰", + "⠠", + "⠰", + "⠸", + "⠙", + "⠋", + "⠇", + "⠆" + ] + }, + "dots5": { + "interval": 80, + "frames": [ + "⠋", + "⠙", + "⠚", + "⠒", + "⠂", + "⠂", + "⠒", + "⠲", + "⠴", + "⠦", + "⠖", + "⠒", + "⠐", + "⠐", + "⠒", + "⠓", + "⠋" + ] + }, + "dots6": { + "interval": 80, + "frames": [ + "⠁", + "⠉", + "⠙", + "⠚", + "⠒", + "⠂", + "⠂", + "⠒", + "⠲", + "⠴", + "⠤", + "⠄", + "⠄", + "⠤", + "⠴", + "⠲", + "⠒", + "⠂", + "⠂", + "⠒", + "⠚", + "⠙", + "⠉", + "⠁" + ] + }, + "dots7": { + "interval": 80, + "frames": [ + "⠈", + "⠉", + "⠋", + "⠓", + "⠒", + "⠐", + "⠐", + "⠒", + "⠖", + "⠦", + "⠤", + "⠠", + "⠠", + "⠤", + "⠦", + "⠖", + "⠒", + "⠐", + "⠐", + "⠒", + "⠓", + "⠋", + "⠉", + "⠈" + ] + }, + "dots8": { + "interval": 80, + "frames": [ + "⠁", + "⠁", + "⠉", + "⠙", + "⠚", + "⠒", + "⠂", + "⠂", + "⠒", + "⠲", + "⠴", + "⠤", + "⠄", + "⠄", + "⠤", + "⠠", + "⠠", + "⠤", + "⠦", + "⠖", + "⠒", + "⠐", + "⠐", + "⠒", + "⠓", + "⠋", + "⠉", + "⠈", + "⠈" + ] + }, + "dots9": { + "interval": 80, + "frames": [ + "⢹", + "⢺", + "⢼", + "⣸", + "⣇", + "⡧", + "⡗", + "⡏" + ] + }, + "dots10": { + "interval": 80, + "frames": [ + "⢄", + "⢂", + "⢁", + "⡁", + "⡈", + "⡐", + "⡠" + ] + }, + "dots11": { + "interval": 100, + "frames": [ + "⠁", + "⠂", + "⠄", + "⡀", + "⢀", + "⠠", + "⠐", + "⠈" + ] + }, + "dots12": { + "interval": 80, + "frames": [ + "⢀⠀", + "⡀⠀", + "⠄⠀", + "⢂⠀", + "⡂⠀", + "⠅⠀", + "⢃⠀", + "⡃⠀", + "⠍⠀", + "⢋⠀", + "⡋⠀", + "⠍⠁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⢈⠩", + "⡀⢙", + "⠄⡙", + "⢂⠩", + "⡂⢘", + "⠅⡘", + "⢃⠨", + "⡃⢐", + "⠍⡐", + "⢋⠠", + "⡋⢀", + "⠍⡁", + "⢋⠁", + "⡋⠁", + "⠍⠉", + "⠋⠉", + "⠋⠉", + "⠉⠙", + "⠉⠙", + "⠉⠩", + "⠈⢙", + "⠈⡙", + "⠈⠩", + "⠀⢙", + "⠀⡙", + "⠀⠩", + "⠀⢘", + "⠀⡘", + "⠀⠨", + "⠀⢐", + "⠀⡐", + "⠀⠠", + "⠀⢀", + "⠀⡀" + ] + }, + "line": { + "interval": 130, + "frames": [ + "-", + "\\", + "|", + "/" + ] + }, + "line2": { + "interval": 100, + "frames": [ + "⠂", + "-", + "–", + "—", + "–", + "-" + ] + }, + "pipe": { + "interval": 100, + "frames": [ + "┤", + "┘", + "┴", + "└", + "├", + "┌", + "┬", + "┐" + ] + }, + 
"simpleDots": { + "interval": 400, + "frames": [ + ". ", + ".. ", + "...", + " " + ] + }, + "simpleDotsScrolling": { + "interval": 200, + "frames": [ + ". ", + ".. ", + "...", + " ..", + " .", + " " + ] + }, + "star": { + "interval": 70, + "frames": [ + "✶", + "✸", + "✹", + "✺", + "✹", + "✷" + ] + }, + "star2": { + "interval": 80, + "frames": [ + "+", + "x", + "*" + ] + }, + "flip": { + "interval": 70, + "frames": [ + "_", + "_", + "_", + "-", + "`", + "`", + "'", + "´", + "-", + "_", + "_", + "_" + ] + }, + "hamburger": { + "interval": 100, + "frames": [ + "☱", + "☲", + "☴" + ] + }, + "growVertical": { + "interval": 120, + "frames": [ + "▁", + "▃", + "▄", + "▅", + "▆", + "▇", + "▆", + "▅", + "▄", + "▃" + ] + }, + "growHorizontal": { + "interval": 120, + "frames": [ + "▏", + "▎", + "▍", + "▌", + "▋", + "▊", + "▉", + "▊", + "▋", + "▌", + "▍", + "▎" + ] + }, + "balloon": { + "interval": 140, + "frames": [ + " ", + ".", + "o", + "O", + "@", + "*", + " " + ] + }, + "balloon2": { + "interval": 120, + "frames": [ + ".", + "o", + "O", + "°", + "O", + "o", + "." + ] + }, + "noise": { + "interval": 100, + "frames": [ + "▓", + "▒", + "░" + ] + }, + "bounce": { + "interval": 120, + "frames": [ + "⠁", + "⠂", + "⠄", + "⠂" + ] + }, + "boxBounce": { + "interval": 120, + "frames": [ + "▖", + "▘", + "▝", + "▗" + ] + }, + "boxBounce2": { + "interval": 100, + "frames": [ + "▌", + "▀", + "▐", + "▄" + ] + }, + "triangle": { + "interval": 50, + "frames": [ + "◢", + "◣", + "◤", + "◥" + ] + }, + "arc": { + "interval": 100, + "frames": [ + "◜", + "◠", + "◝", + "◞", + "◡", + "◟" + ] + }, + "circle": { + "interval": 120, + "frames": [ + "◡", + "⊙", + "◠" + ] + }, + "squareCorners": { + "interval": 180, + "frames": [ + "◰", + "◳", + "◲", + "◱" + ] + }, + "circleQuarters": { + "interval": 120, + "frames": [ + "◴", + "◷", + "◶", + "◵" + ] + }, + "circleHalves": { + "interval": 50, + "frames": [ + "◐", + "◓", + "◑", + "◒" + ] + }, + "squish": { + "interval": 100, + "frames": [ + "╫", + "╪" + ] + }, + "toggle": { + "interval": 250, + "frames": [ + "⊶", + "⊷" + ] + }, + "toggle2": { + "interval": 80, + "frames": [ + "▫", + "▪" + ] + }, + "toggle3": { + "interval": 120, + "frames": [ + "□", + "■" + ] + }, + "toggle4": { + "interval": 100, + "frames": [ + "■", + "□", + "▪", + "▫" + ] + }, + "toggle5": { + "interval": 100, + "frames": [ + "▮", + "▯" + ] + }, + "toggle6": { + "interval": 300, + "frames": [ + "ဝ", + "၀" + ] + }, + "toggle7": { + "interval": 80, + "frames": [ + "⦾", + "⦿" + ] + }, + "toggle8": { + "interval": 100, + "frames": [ + "◍", + "◌" + ] + }, + "toggle9": { + "interval": 100, + "frames": [ + "◉", + "◎" + ] + }, + "toggle10": { + "interval": 100, + "frames": [ + "㊂", + "㊀", + "㊁" + ] + }, + "toggle11": { + "interval": 50, + "frames": [ + "⧇", + "⧆" + ] + }, + "toggle12": { + "interval": 120, + "frames": [ + "☗", + "☖" + ] + }, + "toggle13": { + "interval": 80, + "frames": [ + "=", + "*", + "-" + ] + }, + "arrow": { + "interval": 100, + "frames": [ + "←", + "↖", + "↑", + "↗", + "→", + "↘", + "↓", + "↙" + ] + }, + "arrow2": { + "interval": 80, + "frames": [ + "⬆️ ", + "↗️ ", + "➡️ ", + "↘️ ", + "⬇️ ", + "↙️ ", + "⬅️ ", + "↖️ " + ] + }, + "arrow3": { + "interval": 120, + "frames": [ + "▹▹▹▹▹", + "▸▹▹▹▹", + "▹▸▹▹▹", + "▹▹▸▹▹", + "▹▹▹▸▹", + "▹▹▹▹▸" + ] + }, + "bouncingBar": { + "interval": 80, + "frames": [ + "[ ]", + "[= ]", + "[== ]", + "[=== ]", + "[ ===]", + "[ ==]", + "[ =]", + "[ ]", + "[ =]", + "[ ==]", + "[ ===]", + "[====]", + "[=== ]", + "[== ]", + "[= ]" + ] + }, + "bouncingBall": { + "interval": 
80, + "frames": [ + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "( ●)", + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "(● )" + ] + }, + "smiley": { + "interval": 200, + "frames": [ + "😄 ", + "😝 " + ] + }, + "monkey": { + "interval": 300, + "frames": [ + "🙈 ", + "🙈 ", + "🙉 ", + "🙊 " + ] + }, + "hearts": { + "interval": 100, + "frames": [ + "💛 ", + "💙 ", + "💜 ", + "💚 ", + "❤️ " + ] + }, + "clock": { + "interval": 100, + "frames": [ + "🕛 ", + "🕐 ", + "🕑 ", + "🕒 ", + "🕓 ", + "🕔 ", + "🕕 ", + "🕖 ", + "🕗 ", + "🕘 ", + "🕙 ", + "🕚 " + ] + }, + "earth": { + "interval": 180, + "frames": [ + "🌍 ", + "🌎 ", + "🌏 " + ] + }, + "moon": { + "interval": 80, + "frames": [ + "🌑 ", + "🌒 ", + "🌓 ", + "🌔 ", + "🌕 ", + "🌖 ", + "🌗 ", + "🌘 " + ] + }, + "runner": { + "interval": 140, + "frames": [ + "🚶 ", + "🏃 " + ] + }, + "pong": { + "interval": 80, + "frames": [ + "▐⠂ ▌", + "▐⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂▌", + "▐ ⠠▌", + "▐ ⡀▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐ ⠠ ▌", + "▐ ⠂ ▌", + "▐ ⠈ ▌", + "▐ ⠂ ▌", + "▐ ⠠ ▌", + "▐ ⡀ ▌", + "▐⠠ ▌" + ] + }, + "shark": { + "interval": 120, + "frames": [ + "▐|\\____________▌", + "▐_|\\___________▌", + "▐__|\\__________▌", + "▐___|\\_________▌", + "▐____|\\________▌", + "▐_____|\\_______▌", + "▐______|\\______▌", + "▐_______|\\_____▌", + "▐________|\\____▌", + "▐_________|\\___▌", + "▐__________|\\__▌", + "▐___________|\\_▌", + "▐____________|\\▌", + "▐____________/|▌", + "▐___________/|_▌", + "▐__________/|__▌", + "▐_________/|___▌", + "▐________/|____▌", + "▐_______/|_____▌", + "▐______/|______▌", + "▐_____/|_______▌", + "▐____/|________▌", + "▐___/|_________▌", + "▐__/|__________▌", + "▐_/|___________▌", + "▐/|____________▌" + ] + }, + "dqpb": { + "interval": 100, + "frames": [ + "d", + "q", + "p", + "b" + ] + }, + "weather": { + "interval": 100, + "frames": [ + "☀️ ", + "☀️ ", + "☀️ ", + "🌤 ", + "⛅️ ", + "🌥 ", + "☁️ ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "🌧 ", + "🌨 ", + "⛈ ", + "🌨 ", + "🌧 ", + "🌨 ", + "☁️ ", + "🌥 ", + "⛅️ ", + "🌤 ", + "☀️ ", + "☀️ " + ] + }, + "christmas": { + "interval": 400, + "frames": [ + "🌲", + "🎄" + ] + }, + "grenade": { + "interval": 80, + "frames": [ + "، ", + "′ ", + " ´ ", + " ‾ ", + " ⸌", + " ⸊", + " |", + " ⁎", + " ⁕", + " ෴ ", + " ⁓", + " ", + " ", + " " + ] + }, + "point": { + "interval": 125, + "frames": [ + "∙∙∙", + "●∙∙", + "∙●∙", + "∙∙●", + "∙∙∙" + ] + }, + "layer": { + "interval": 150, + "frames": [ + "-", + "=", + "≡" + ] + } +} diff --git a/pipenv/vendor/yaspin/helpers.py b/pipenv/vendor/yaspin/helpers.py new file mode 100644 index 0000000000..49ce0d06ca --- /dev/null +++ b/pipenv/vendor/yaspin/helpers.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.helpers +~~~~~~~~~~~~~~ + +Helper functions. +""" + +from __future__ import absolute_import + +from .compat import bytes +from .constants import ENCODING + + +def to_unicode(text_type, encoding=ENCODING): + if isinstance(text_type, bytes): + return text_type.decode(encoding) + return text_type diff --git a/pipenv/vendor/yaspin/signal_handlers.py b/pipenv/vendor/yaspin/signal_handlers.py new file mode 100644 index 0000000000..f38f5d6b78 --- /dev/null +++ b/pipenv/vendor/yaspin/signal_handlers.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.signal_handlers +~~~~~~~~~~~~~~~~~~~~~~ + +Callback functions or "signal handlers", that are invoked +when the signal occurs. 
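+
+A minimal usage sketch (hypothetical caller code; assumes the ``sigmap``
+argument accepted by ``yaspin.yaspin``)::
+
+    import signal
+    from yaspin import yaspin
+    from yaspin.signal_handlers import default_handler
+
+    with yaspin(sigmap={signal.SIGINT: default_handler}, text="working"):
+        long_running_task()  # hypothetical function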
+""" + +import sys + + +def default_handler(signum, frame, spinner): + """Signal handler, used to gracefully shut down the ``spinner`` instance + when specified signal is received by the process running the ``spinner``. + + ``signum`` and ``frame`` are mandatory arguments. Check ``signal.signal`` + function for more details. + """ + spinner.fail() + spinner.stop() + sys.exit(0) + + +def fancy_handler(signum, frame, spinner): + """Signal handler, used to gracefully shut down the ``spinner`` instance + when specified signal is received by the process running the ``spinner``. + + ``signum`` and ``frame`` are mandatory arguments. Check ``signal.signal`` + function for more details. + """ + spinner.red.fail("✘") + spinner.stop() + sys.exit(0) diff --git a/pipenv/vendor/yaspin/spinners.py b/pipenv/vendor/yaspin/spinners.py new file mode 100644 index 0000000000..9c3fa7b84b --- /dev/null +++ b/pipenv/vendor/yaspin/spinners.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- + +""" +yaspin.spinners +~~~~~~~~~~~~~~~ + +A collection of cli spinners. +""" + +import codecs +import os +from collections import namedtuple + +try: + import simplejson as json +except ImportError: + import json + + +THIS_DIR = os.path.dirname(os.path.realpath(__file__)) +SPINNERS_PATH = os.path.join(THIS_DIR, "data/spinners.json") + + +def _hook(dct): + return namedtuple("Spinner", dct.keys())(*dct.values()) + + +with codecs.open(SPINNERS_PATH, encoding="utf-8") as f: + Spinners = json.load(f, object_hook=_hook) diff --git a/pipenv/vendor/yaspin/termcolor.py b/pipenv/vendor/yaspin/termcolor.py new file mode 100644 index 0000000000..f11b824b28 --- /dev/null +++ b/pipenv/vendor/yaspin/termcolor.py @@ -0,0 +1,168 @@ +# coding: utf-8 +# Copyright (c) 2008-2011 Volvox Development Team +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# +# Author: Konstantin Lepa + +"""ANSII Color formatting for output in terminal.""" + +from __future__ import print_function +import os + + +__ALL__ = [ 'colored', 'cprint' ] + +VERSION = (1, 1, 0) + +ATTRIBUTES = dict( + list(zip([ + 'bold', + 'dark', + '', + 'underline', + 'blink', + '', + 'reverse', + 'concealed' + ], + list(range(1, 9)) + )) + ) +del ATTRIBUTES[''] + + +HIGHLIGHTS = dict( + list(zip([ + 'on_grey', + 'on_red', + 'on_green', + 'on_yellow', + 'on_blue', + 'on_magenta', + 'on_cyan', + 'on_white' + ], + list(range(40, 48)) + )) + ) + + +COLORS = dict( + list(zip([ + 'grey', + 'red', + 'green', + 'yellow', + 'blue', + 'magenta', + 'cyan', + 'white', + ], + list(range(30, 38)) + )) + ) + + +RESET = '\033[0m' + + +def colored(text, color=None, on_color=None, attrs=None): + """Colorize text. + + Available text colors: + red, green, yellow, blue, magenta, cyan, white. + + Available text highlights: + on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white. + + Available attributes: + bold, dark, underline, blink, reverse, concealed. + + Example: + colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink']) + colored('Hello, World!', 'green') + """ + if os.getenv('ANSI_COLORS_DISABLED') is None: + fmt_str = '\033[%dm%s' + if color is not None: + text = fmt_str % (COLORS[color], text) + + if on_color is not None: + text = fmt_str % (HIGHLIGHTS[on_color], text) + + if attrs is not None: + for attr in attrs: + text = fmt_str % (ATTRIBUTES[attr], text) + + text += RESET + return text + + +def cprint(text, color=None, on_color=None, attrs=None, **kwargs): + """Print colorize text. + + It accepts arguments of print function. + """ + + print((colored(text, color, on_color, attrs)), **kwargs) + + +if __name__ == '__main__': + print('Current terminal type: %s' % os.getenv('TERM')) + print('Test basic colors:') + cprint('Grey color', 'grey') + cprint('Red color', 'red') + cprint('Green color', 'green') + cprint('Yellow color', 'yellow') + cprint('Blue color', 'blue') + cprint('Magenta color', 'magenta') + cprint('Cyan color', 'cyan') + cprint('White color', 'white') + print(('-' * 78)) + + print('Test highlights:') + cprint('On grey color', on_color='on_grey') + cprint('On red color', on_color='on_red') + cprint('On green color', on_color='on_green') + cprint('On yellow color', on_color='on_yellow') + cprint('On blue color', on_color='on_blue') + cprint('On magenta color', on_color='on_magenta') + cprint('On cyan color', on_color='on_cyan') + cprint('On white color', color='grey', on_color='on_white') + print('-' * 78) + + print('Test attributes:') + cprint('Bold grey color', 'grey', attrs=['bold']) + cprint('Dark red color', 'red', attrs=['dark']) + cprint('Underline green color', 'green', attrs=['underline']) + cprint('Blink yellow color', 'yellow', attrs=['blink']) + cprint('Reversed blue color', 'blue', attrs=['reverse']) + cprint('Concealed Magenta color', 'magenta', attrs=['concealed']) + cprint('Bold underline reverse cyan color', 'cyan', + attrs=['bold', 'underline', 'reverse']) + cprint('Dark blink concealed white color', 'white', + attrs=['dark', 'blink', 'concealed']) + print(('-' * 78)) + + print('Test mixing:') + cprint('Underline red on grey color', 'red', 'on_grey', + ['underline']) + cprint('Reversed green on red color', 'green', 'on_red', ['reverse']) + diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 6dff468ac8..80b3f9e58c 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ 
b/tasks/vendoring/patches/patched/piptools.patch @@ -19,10 +19,10 @@ index 4e6174c..75f9b49 100644 # NOTE # We used to store the cache dir under ~/.pip-tools, which is not the diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py -index 1c4b943..84077f0 100644 +index bf69803..eb20560 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py -@@ -1,9 +1,10 @@ +@@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import (absolute_import, division, print_function, unicode_literals) @@ -30,40 +30,47 @@ index 1c4b943..84077f0 100644 +import copy import hashlib import os -+import sys from contextlib import contextmanager - from shutil import rmtree - -@@ -15,13 +16,22 @@ from .._compat import ( +@@ -15,26 +15,70 @@ from .._compat import ( Wheel, FAVORITE_HASH, TemporaryDirectory, - PyPI + PyPI, + InstallRequirement, -+ SafeFileCache, ++ SafeFileCache ) ++os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip" ++from pip_shims.shims import pip_import, VcsSupport, WheelCache ++from packaging.requirements import Requirement ++from packaging.specifiers import SpecifierSet, Specifier ++from packaging.markers import Op, Value, Variable, Marker ++InstallationError = pip_import("InstallationError", "exceptions.InstallationError", "7.0", "9999") ++from notpip._internal.resolve import Resolver as PipResolver ++ -from ..cache import CACHE_DIR -+from pip._vendor.packaging.requirements import Requirement -+from pip._vendor.packaging.specifiers import SpecifierSet, Specifier -+from pip._vendor.packaging.markers import Op, Value, Variable -+from pip._internal.exceptions import InstallationError -+from pip._internal.vcs import VcsSupport -+ -+from pipenv.environments import PIPENV_CACHE_DIR ++from pipenv.environments import PIPENV_CACHE_DIR as CACHE_DIR from ..exceptions import NoCandidateFound - from ..utils import (fs_str, is_pinned_requirement, lookup_table, +-from ..utils import (fs_str, is_pinned_requirement, lookup_table, - make_install_requirement) ++from ..utils import (fs_str, is_pinned_requirement, lookup_table, dedup, + make_install_requirement, clean_requires_python) -+ from .base import BaseRepository + try: +- from pip._internal.req.req_tracker import RequirementTracker ++ from notpip._internal.req.req_tracker import RequirementTracker + except ImportError: + @contextmanager + def RequirementTracker(): + yield -@@ -37,6 +47,45 @@ except ImportError: - from pip.wheel import WheelCache - - +-try: +- from pip._internal.cache import WheelCache +-except ImportError: +- from pip.wheel import WheelCache ++ +class HashCache(SafeFileCache): + """Caches hashes of PyPI artifacts so we do not need to re-download them + @@ -73,7 +80,7 @@ index 1c4b943..84077f0 100644 + def __init__(self, *args, **kwargs): + session = kwargs.pop('session') + self.session = session -+ kwargs.setdefault('directory', os.path.join(PIPENV_CACHE_DIR, 'hash-cache')) ++ kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache')) + super(HashCache, self).__init__(*args, **kwargs) + + def get_hash(self, location): @@ -101,12 +108,10 @@ index 1c4b943..84077f0 100644 + for chunk in iter(lambda: fp.read(8096), b""): + h.update(chunk) + return ":".join([FAVORITE_HASH, h.hexdigest()]) -+ -+ - class PyPIRepository(BaseRepository): - DEFAULT_INDEX_URL = PyPI.simple_url -@@ -46,10 +95,11 @@ class PyPIRepository(BaseRepository): + + class PyPIRepository(BaseRepository): +@@ -46,8 +90,9 @@ class PyPIRepository(BaseRepository): config), but any other 
PyPI mirror can be used if index_urls is changed/configured on the Finder. """ @@ -115,12 +120,9 @@ index 1c4b943..84077f0 100644 self.session = session + self.use_json = use_json self.pip_options = pip_options -- self.wheel_cache = WheelCache(CACHE_DIR, pip_options.format_control) -+ self.wheel_cache = WheelCache(PIPENV_CACHE_DIR, pip_options.format_control) index_urls = [pip_options.index_url] + pip_options.extra_index_urls - if pip_options.no_index: -@@ -74,11 +124,15 @@ class PyPIRepository(BaseRepository): +@@ -73,6 +118,10 @@ class PyPIRepository(BaseRepository): # of all secondary dependencies for the given requirement, so we # only have to go to disk once for each requirement self._dependencies_cache = {} @@ -131,20 +133,12 @@ index 1c4b943..84077f0 100644 # Setup file paths self.freshen_build_caches() -- self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs')) -- self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels')) -+ self._download_dir = fs_str(os.path.join(PIPENV_CACHE_DIR, 'pkgs')) -+ self._wheel_download_dir = fs_str(os.path.join(PIPENV_CACHE_DIR, 'wheels')) - - def freshen_build_caches(self): - """ -@@ -114,10 +168,14 @@ class PyPIRepository(BaseRepository): +@@ -113,10 +162,13 @@ class PyPIRepository(BaseRepository): if ireq.editable: return ireq # return itself as the best match - all_candidates = self.find_all_candidates(ireq.name) + all_candidates = clean_requires_python(self.find_all_candidates(ireq.name)) -+ candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version, unique=True) - matching_versions = ireq.specifier.filter((candidate.version for candidate in all_candidates), + try: @@ -155,21 +149,15 @@ index 1c4b943..84077f0 100644 # Reuses pip's internal candidate sort key to sort matching_candidates = [candidates_by_version[ver] for ver in matching_versions] -@@ -126,11 +184,71 @@ class PyPIRepository(BaseRepository): - best_candidate = max(matching_candidates, key=self.finder._candidate_sort_key) +@@ -126,25 +178,84 @@ class PyPIRepository(BaseRepository): # Turn the candidate into a pinned InstallRequirement -- return make_install_requirement( + return make_install_requirement( - best_candidate.project, best_candidate.version, ireq.extras, constraint=ireq.constraint - ) -+ new_req = make_install_requirement( -+ best_candidate.project, best_candidate.version, ireq.extras, ireq.markers, constraint=ireq.constraint ++ best_candidate.project, best_candidate.version, ireq.extras, ireq.markers, constraint=ireq.constraint + ) + -+ # KR TODO: Marker here? -+ -+ return new_req -+ + def get_json_dependencies(self, ireq): + + if not (is_pinned_requirement(ireq)): @@ -178,7 +166,8 @@ index 1c4b943..84077f0 100644 + def gen(ireq): + if self.DEFAULT_INDEX_URL not in self.finder.index_urls: + return -+ + +- def resolve_reqs(self, download_dir, ireq, wheel_cache): + url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name) + releases = self.session.get(url).json()['releases'] + @@ -211,8 +200,8 @@ index 1c4b943..84077f0 100644 + return set(self._json_dep_cache[ireq]) + except Exception: + return set() - - def get_dependencies(self, ireq): ++ ++ def get_dependencies(self, ireq): + json_results = set() + + if self.use_json: @@ -226,100 +215,98 @@ index 1c4b943..84077f0 100644 + + return json_results + -+ def get_legacy_dependencies(self, ireq): - """ - Given a pinned or an editable InstallRequirement, returns a set of - dependencies (also InstallRequirements, but not necessarily pinned). 
-@@ -155,20 +273,46 @@ class PyPIRepository(BaseRepository): - os.makedirs(download_dir) - if not os.path.isdir(self._wheel_download_dir): - os.makedirs(self._wheel_download_dir) -- ++ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist=None): + results = None ++ setup_requires = {} ++ dist = None + try: +- from pip._internal.operations.prepare import RequirementPreparer +- from pip._internal.resolve import Resolver as PipResolver ++ from notpip._internal.operations.prepare import RequirementPreparer + except ImportError: +- # Pip 9 and below ++ # Pip 9 and below + reqset = RequirementSet( + self.build_dir, + self.source_dir, + download_dir=download_dir, + wheel_download_dir=self._wheel_download_dir, + session=self.session, ++ ignore_installed=True, ++ ignore_compatibility=False, + wheel_cache=wheel_cache + ) +- results = reqset._prepare_file(self.finder, ireq) ++ results = reqset._prepare_file(self.finder, ireq, ignore_requires_python=True) + else: + # pip >= 10 + preparer_kwargs = { +@@ -153,19 +264,20 @@ class PyPIRepository(BaseRepository): + 'download_dir': download_dir, + 'wheel_download_dir': self._wheel_download_dir, + 'progress_bar': 'off', +- 'build_isolation': False ++ 'build_isolation': True + } + resolver_kwargs = { + 'finder': self.finder, + 'session': self.session, + 'upgrade_strategy': "to-satisfy-only", +- 'force_reinstall': False, ++ 'force_reinstall': True, + 'ignore_dependencies': False, +- 'ignore_requires_python': False, ++ 'ignore_requires_python': True, + 'ignore_installed': True, + 'isolated': False, + 'wheel_cache': wheel_cache, +- 'use_user_site': False ++ 'use_user_site': False, ++ 'ignore_compatibility': True + } + resolver = None + preparer = None +@@ -177,15 +289,98 @@ class PyPIRepository(BaseRepository): + resolver_kwargs['preparer'] = preparer + reqset = RequirementSet() + ireq.is_direct = True +- reqset.add_requirement(ireq) ++ # reqset.add_requirement(ireq) + resolver = PipResolver(**resolver_kwargs) + resolver.require_hashes = False + results = resolver._resolve_one(reqset, ireq) + reqset.cleanup_files() + +- return set(results) ++ if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)): + # Collect setup_requires info from local eggs. 
+ # Do this after we call the preparer on these reqs to make sure their + # egg info has been created -+ setup_requires = {} -+ dist = None -+ if ireq.editable: -+ try: -+ from pipenv.utils import chdir -+ with chdir(ireq.setup_py_dir): -+ from setuptools.dist import distutils -+ dist = distutils.core.run_setup(ireq.setup_py) -+ except (ImportError, InstallationError, TypeError, AttributeError): -+ pass ++ from pipenv.utils import chdir ++ with chdir(ireq.setup_py_dir): + try: -+ dist = ireq.get_dist() if not dist else dist ++ from setuptools.dist import distutils ++ dist = distutils.core.run_setup(ireq.setup_py) + except InstallationError: + ireq.run_egg_info() -+ dist = ireq.get_dist() + except (TypeError, ValueError, AttributeError): + pass -+ else: -+ setup_requires = getattr(dist, "extras_require", None) -+ if not setup_requires: -+ setup_requires = {"setup_requires": getattr(dist, "setup_requires", None)} - try: -- # Pip < 9 and below -+ # Pip 9 and below - reqset = RequirementSet( - self.build_dir, - self.source_dir, - download_dir=download_dir, - wheel_download_dir=self._wheel_download_dir, - session=self.session, -+ ignore_installed=True, -+ ignore_compatibility=False, - wheel_cache=self.wheel_cache, - ) -- self._dependencies_cache[ireq] = reqset._prepare_file( -+ result = reqset._prepare_file( - self.finder, -- ireq -+ ireq, -+ ignore_requires_python=True - ) - except TypeError: - # Pip >= 10 (new resolver!) -@@ -188,17 +332,97 @@ class PyPIRepository(BaseRepository): - finder=self.finder, - session=self.session, - upgrade_strategy="to-satisfy-only", -- force_reinstall=False, -+ force_reinstall=True, - ignore_dependencies=False, -- ignore_requires_python=False, -+ ignore_requires_python=True, - ignore_installed=True, - isolated=False, - wheel_cache=self.wheel_cache, - use_user_site=False, -+ ignore_compatibility=False - ) - self.resolver.resolve(reqset) -- self._dependencies_cache[ireq] = reqset.requirements.values() -+ result = set(reqset.requirements.values()) -+ -+ # HACK: Sometimes the InstallRequirement doesn't properly get -+ # these values set on it during the resolution process. It's -+ # difficult to pin down what is going wrong. This fixes things. -+ if not getattr(ireq, 'version', None): -+ try: -+ dist = ireq.get_dist() if not dist else None -+ ireq.version = ireq.get_dist().version -+ except (ValueError, OSError, TypeError, AttributeError) as e: -+ pass -+ if not getattr(ireq, 'project_name', None): -+ try: -+ ireq.project_name = dist.project_name if dist else None -+ except (ValueError, TypeError) as e: -+ pass ++ if not dist: ++ try: ++ dist = ireq.get_dist() ++ except (ImportError, ValueError, TypeError, AttributeError): ++ pass ++ if ireq.editable and dist: ++ setup_requires = getattr(dist, "extras_require", None) ++ if not setup_requires: ++ setup_requires = {"setup_requires": getattr(dist, "setup_requires", None)} + if not getattr(ireq, 'req', None): + try: + ireq.req = dist.as_requirement() if dist else None + except (ValueError, TypeError) as e: + pass -+ + +- def get_dependencies(self, ireq): + # Convert setup_requires dict into a somewhat usable form. 
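+ # (Sketch of the transformation, for orientation only: each requirement
+ # string has the section's python_version marker appended with ':'
+ # rewritten to ';' and is re-parsed via InstallRequirement.from_line;
+ # sections without a python_version marker are skipped.)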
+ if setup_requires: + for section in setup_requires: @@ -341,14 +328,14 @@ index 1c4b943..84077f0 100644 + if ':' not in value: + try: + if not not_python: -+ result = result + [InstallRequirement.from_line("{0}{1}".format(value, python_version).replace(':', ';'))] ++ results.add(InstallRequirement.from_line("{0}{1}".format(value, python_version).replace(':', ';'))) + # Anything could go wrong here -- can't be too careful. + except Exception: + pass + + # this section properly creates 'python_version' markers for cross-python + # virtualenv creation and for multi-python compatibility. -+ requires_python = reqset.requires_python if hasattr(reqset, 'requires_python') else self.resolver.requires_python ++ requires_python = reqset.requires_python if hasattr(reqset, 'requires_python') else resolver.requires_python + if requires_python: + marker_str = '' + # This corrects a logic error from the previous code which said that if @@ -358,31 +345,48 @@ index 1c4b943..84077f0 100644 + if any(requires_python.startswith(op) for op in Specifier._operators.keys()): + # We are checking first if we have leading specifier operator + # if not, we can assume we should be doing a == comparison -+ specifierset = list(SpecifierSet(requires_python)) ++ specifierset = SpecifierSet(requires_python) + # for multiple specifiers, the correct way to represent that in + # a specifierset is `Requirement('fakepkg; python_version<"3.0,>=2.6"')` -+ marker_key = Variable('python_version') -+ markers = [] -+ for spec in specifierset: -+ operator, val = spec._spec -+ operator = Op(operator) -+ val = Value(val) -+ markers.append(''.join([marker_key.serialize(), operator.serialize(), val.serialize()])) -+ marker_str = ' and '.join(markers) ++ from passa.internals.specifiers import cleanup_pyspecs ++ marker_str = str(Marker(" and ".join(dedup([ ++ "python_version {0[0]} '{0[1]}'".format(spec) ++ for spec in cleanup_pyspecs(specifierset) ++ ])))) + # The best way to add markers to a requirement is to make a separate requirement + # with only markers on it, and then to transfer the object istelf + marker_to_add = Requirement('fakepkg; {0}'.format(marker_str)).marker -+ result.remove(ireq) ++ if ireq in results: ++ results.remove(ireq) ++ print(marker_to_add) + ireq.req.marker = marker_to_add -+ result.add(ireq) + -+ self._dependencies_cache[ireq] = result - reqset.cleanup_files() ++ results = set(results) if results else set() ++ return results, ireq + - return set(self._dependencies_cache[ireq]) - - def get_hashes(self, ireq): -@@ -210,6 +434,10 @@ class PyPIRepository(BaseRepository): ++ def get_legacy_dependencies(self, ireq): + """ + Given a pinned or an editable InstallRequirement, returns a set of + dependencies (also InstallRequirements, but not necessarily pinned). +@@ -200,6 +395,7 @@ class PyPIRepository(BaseRepository): + # If a download_dir is passed, pip will unnecessarely + # archive the entire source directory + download_dir = None ++ + elif ireq.link and not ireq.link.is_artifact: + # No download_dir for VCS sources. This also works around pip + # using git-checkout-index, which gets rid of the .git dir. 
+@@ -214,7 +410,8 @@ class PyPIRepository(BaseRepository): + wheel_cache = WheelCache(CACHE_DIR, self.pip_options.format_control) + prev_tracker = os.environ.get('PIP_REQ_TRACKER') + try: +- self._dependencies_cache[ireq] = self.resolve_reqs(download_dir, ireq, wheel_cache) ++ results, ireq = self.resolve_reqs(download_dir, ireq, wheel_cache) ++ self._dependencies_cache[ireq] = results + finally: + if 'PIP_REQ_TRACKER' in os.environ: + if prev_tracker: +@@ -236,6 +433,10 @@ class PyPIRepository(BaseRepository): if ireq.editable: return set() @@ -393,7 +397,7 @@ index 1c4b943..84077f0 100644 if not is_pinned_requirement(ireq): raise TypeError( "Expected pinned requirement, got {}".format(ireq)) -@@ -217,24 +445,22 @@ class PyPIRepository(BaseRepository): +@@ -243,24 +444,22 @@ class PyPIRepository(BaseRepository): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. From 8811b69404ebeb467cbee28d9e860eae38119037 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 01:59:40 -0400 Subject: [PATCH 04/35] Vendor yaspin, update vistir, add pip18 patch Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 4 +- pipenv/vendor/vendor_pip.txt | 20 +- pipenv/vendor/vistir/__init__.py | 2 +- pipenv/vendor/vistir/misc.py | 83 +++- tasks/vendoring/patches/patched/pip18.patch | 495 ++++++++++++++++++++ 5 files changed, 580 insertions(+), 24 deletions(-) create mode 100644 tasks/vendoring/patches/patched/pip18.patch diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index d1667eeb00..52d52ec6ec 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -41,8 +41,10 @@ semver==2.8.1 shutilwhich==1.1.0 toml==0.9.4 cached-property==1.4.3 -vistir==0.1.4 +vistir==git+https://github.com/sarugaku/vistir.git@e50a767497a059cdd727583448b77c694374fa38#egg=vistir pip-shims==0.1.2 modutil==2.0.0 ptyprocess==0.6.0 enum34==1.1.6 +yaspin==0.14.0 +passa==git+https://github.com/sarugaku/passa.git@master#egg=passa diff --git a/pipenv/vendor/vendor_pip.txt b/pipenv/vendor/vendor_pip.txt index 3994e709a1..b9854e9ad1 100644 --- a/pipenv/vendor/vendor_pip.txt +++ b/pipenv/vendor/vendor_pip.txt @@ -1,22 +1,22 @@ appdirs==1.4.3 distlib==0.2.7 -distro==1.2.0 +distro==1.3.0 html5lib==1.0.1 six==1.11.0 colorama==0.3.9 -CacheControl==0.12.4 +CacheControl==0.12.5 msgpack-python==0.5.6 lockfile==0.12.2 -progress==1.3 -ipaddress==1.0.19 # Only needed on 2.6 and 2.7 +progress==1.4 +ipaddress==1.0.22 # Only needed on 2.6 and 2.7 packaging==17.1 pyparsing==2.2.0 -pytoml==0.1.14 +pytoml==0.1.16 retrying==1.3.3 -requests==2.18.4 +requests==2.19.1 chardet==3.0.4 - idna==2.6 - urllib3==1.22 - certifi==2018.1.18 -setuptools==39.1.0 + idna==2.7 + urllib3==1.23 + certifi==2018.4.16 +setuptools==39.2.0 webencodings==0.5.1 diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index eeb8344cb0..94079aa5bb 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -13,7 +13,7 @@ from .path import mkdir_p, rmtree -__version__ = '0.1.4' +__version__ = '0.1.5.dev0' __all__ = [ diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 82bdf5ed47..98ebe502f5 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -8,10 +8,13 @@ import sys from collections import OrderedDict +from contextlib import contextmanager from functools import partial import six +from yaspin import yaspin, spinners + from .cmdparse import 
Script
from .compat import Path, fs_str, partialmethod
 
@@ -67,37 +70,87 @@ def dedup(iterable):
 return iter(OrderedDict.fromkeys(iterable))
 
 
-def _spawn_subprocess(script, env={}):
+def _spawn_subprocess(script, env={}, block=True, cwd=None):
 from distutils.spawn import find_executable
 command = find_executable(script.command)
 options = {
 "env": env,
 "universal_newlines": True,
 "stdout": subprocess.PIPE,
- "stderr": subprocess.PIPE,
+ "stderr": subprocess.PIPE if block else subprocess.STDOUT,
+ "stdin": None if block else subprocess.PIPE,
+ "shell": False
 }
+ if cwd:
+ options["cwd"] = cwd
 # Command not found, maybe this is a shell built-in?
+ cmd = [command] + script.args
 if not command:
 # Try to use CreateProcess directly if possible.
- return subprocess.Popen(script.cmdify(), shell=True, **options)
+ cmd = script.cmdify()
+ options["shell"] = True
+
 # Try to use CreateProcess directly if possible. Specifically catch
 # Windows error 193 "Command is not a valid Win32 application" to handle
 # a "command" that is non-executable. See pypa/pipenv#2727.
 try:
- return subprocess.Popen([command] + script.args, **options)
+ return subprocess.Popen(cmd, **options)
 except WindowsError as e:
 if e.winerror != 193:
 raise
+ options["shell"] = True
 # Try shell mode to use Windows's file association for file launch.
- return subprocess.Popen(script.cmdify(), shell=True, **options)
 
 
-def run(cmd, env={}, return_object=False):
+ return subprocess.Popen(script.cmdify(), **options)
+
+
+def _create_subprocess(cmd, env={}, block=True, return_object=False, cwd=os.curdir, verbose=False, spinner=None):
+ try:
+ c = _spawn_subprocess(cmd, env=env, block=block, cwd=cwd)
+ except Exception as exc:
+ print(
+ "Error %s while executing command %s", exc, " ".join(cmd._parts)
+ )
+ raise
+ if not block:
+ c.stdin.close()
+ output = []
+ if c.stdout is not None:
+ while True:
+ line = to_text(c.stdout.readline())
+ if not line:
+ break
+ line = line.rstrip()
+ output.append(line)
+ if verbose:
+ print(line + "\n")
+ elif spinner:
+ spinner.text = line
+ else:
+ continue
+ try:
+ c.wait()
+ finally:
+ if c.stdout:
+ c.stdout.close()
+ c.out = "".join(output)
+ c.err = ""
+ else:
+ c.out, c.err = c.communicate()
+ if not return_object:
+ return c.out.strip(), c.err.strip()
+ return c
+
+
+def run(cmd, env={}, return_object=False, block=True, cwd=None, verbose=False, nospin=False,):
 """Use `subprocess.Popen` to get the output of a command and decode it.
 
 :param list cmd: A list representing the command you want to run.
 :param dict env: Additional environment settings to pass through to the subprocess.
 :param bool return_object: When True, returns the whole subprocess instance
- :returns: A 2-tuple of (output, error)
+ :param bool block: When False, returns a potentially still-running :class:`subprocess.Popen` instance
+ :param str cwd: Current working directory context to use for spawning the subprocess.
+ :param bool verbose: Whether to print stdout in real time when non-blocking.
+ :param bool nospin: Whether to disable the cli spinner.
+ :returns: A 2-tuple of (output, error) or a :class:`subprocess.Popen` object.
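+
+ Example (an illustrative sketch only, assuming ``git`` is on the PATH)::
+
+ out, err = run(["git", "--version"])
+ c = run(["git", "--version"], return_object=True)  # c.out holds stdout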
""" if six.PY2: fs_encode = partial(to_bytes, encoding=locale_encoding) @@ -113,11 +166,17 @@ def run(cmd, env={}, return_object=False): cmd = [c.encode("utf-8") for c in cmd] if not isinstance(cmd, Script): cmd = Script.parse(cmd) - c = _spawn_subprocess(cmd, env=_env) - out, err = c.communicate() - if not return_object: - return out.strip(), err.strip() - return c + spinner = yaspin + if nospin: + @contextmanager + def spinner(spin_type): + class FakeClass(object): + def __init__(self): + self.text = "" + myobj = FakeClass() + yield myobj + with spinner(spinners.Spinners.bouncingBar) as sp: + return _create_subprocess(cmd, env=_env, return_object=return_object, block=block, cwd=cwd, verbose=verbose, spinner=sp) def load_path(python): diff --git a/tasks/vendoring/patches/patched/pip18.patch b/tasks/vendoring/patches/patched/pip18.patch new file mode 100644 index 0000000000..bdb82b3ccf --- /dev/null +++ b/tasks/vendoring/patches/patched/pip18.patch @@ -0,0 +1,495 @@ +diff --git a/pipenv/patched/_internal/download.py b/pipenv/patched/_internal/download.py +index 96f3b65c..3fb4ebef 100644 +--- a/pipenv/patched/_internal/download.py ++++ b/pipenv/patched/_internal/download.py +@@ -19,6 +19,7 @@ from pip._vendor.lockfile import LockError + from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter + from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth + from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response ++from pip._vendor.requests.sessions import Session + from pip._vendor.requests.structures import CaseInsensitiveDict + from pip._vendor.requests.utils import get_netrc_auth + # NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +@@ -69,7 +70,7 @@ def user_agent(): + Return a string representing the user agent. 
+ """ + data = { +- "installer": {"name": "pip", "version": pip.__version__}, ++ "installer": {"name": "pip", "version": pipenv.patched.notpip.__version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), +@@ -322,7 +323,7 @@ class InsecureHTTPAdapter(HTTPAdapter): + conn.ca_certs = None + + +-class PipSession(requests.Session): ++class PipSession(Session): + + timeout = None + +@@ -752,7 +753,7 @@ def _copy_dist_from_dir(link_path, location): + + # build an sdist + setup_py = 'setup.py' +- sdist_args = [sys.executable] ++ sdist_args = [os.environ.get('PIP_PYTHON_PATH', sys.executable)] + sdist_args.append('-c') + sdist_args.append(SETUPTOOLS_SHIM % setup_py) + sdist_args.append('sdist') +diff --git a/pipenv/patched/_internal/index.py b/pipenv/patched/_internal/index.py +index 8c0ec82c..ad00ba04 100644 +--- a/pipenv/patched/_internal/index.py ++++ b/pipenv/patched/_internal/index.py +@@ -58,11 +58,12 @@ logger = logging.getLogger(__name__) + + class InstallationCandidate(object): + +- def __init__(self, project, version, location): ++ def __init__(self, project, version, location, requires_python=None): + self.project = project + self.version = parse_version(version) + self.location = location + self._key = (self.project, self.version, self.location) ++ self.requires_python = requires_python + + def __repr__(self): + return "".format( +@@ -168,6 +169,9 @@ class PackageFinder(object): + # The Session we'll use to make requests + self.session = session + ++ # Kenneth's Hack ++ self.extra = None ++ + # The valid tags to check potential found wheel candidates against + self.valid_tags = get_supported( + versions=versions, +@@ -220,6 +224,24 @@ class PackageFinder(object): + ) + self.dependency_links.extend(links) + ++ @staticmethod ++ def get_extras_links(links): ++ requires = [] ++ extras = {} ++ ++ current_list = requires ++ ++ for link in links: ++ if not link: ++ current_list = requires ++ if link.startswith('['): ++ current_list = [] ++ extras[link[1:-1]] = current_list ++ else: ++ current_list.append(link) ++ ++ return extras ++ + @staticmethod + def _sort_locations(locations, expand_dir=False): + """ +@@ -272,7 +294,7 @@ class PackageFinder(object): + + return files, urls + +- def _candidate_sort_key(self, candidate): ++ def _candidate_sort_key(self, candidate, ignore_compatibility=False): + """ + Function used to generate link sort key for link tuples. + The greater the return value, the more preferred it is. +@@ -292,14 +314,19 @@ class PackageFinder(object): + if candidate.location.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(candidate.location.filename) +- if not wheel.supported(self.valid_tags): ++ if not wheel.supported(self.valid_tags) and not ignore_compatibility: + raise UnsupportedWheel( + "%s is not a supported wheel for this platform. It " + "can't be sorted." 
% wheel.filename + ) + if self.prefer_binary: + binary_preference = 1 +- pri = -(wheel.support_index_min(self.valid_tags)) ++ tags = self.valid_tags if not ignore_compatibility else None ++ try: ++ pri = -(wheel.support_index_min(tags=tags)) ++ except TypeError: ++ pri = -(support_num) ++ + if wheel.build_tag is not None: + match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + build_tag_groups = match.groups() +@@ -484,7 +511,7 @@ class PackageFinder(object): + dependency_versions + ) + +- def find_requirement(self, req, upgrade): ++ def find_requirement(self, req, upgrade, ignore_compatibility=False): + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean +@@ -594,8 +621,9 @@ class PackageFinder(object): + continue + seen.add(location) + +- page = self._get_page(location) +- if page is None: ++ try: ++ page = self._get_page(location) ++ except requests.HTTPError as e: + continue + + yield page +@@ -631,7 +659,7 @@ class PackageFinder(object): + logger.debug('Skipping link %s; %s', link, reason) + self.logged_links.add(link) + +- def _link_package_versions(self, link, search): ++ def _link_package_versions(self, link, search, ignore_compatibility=True): + """Return an InstallationCandidate or None""" + version = None + if link.egg_fragment: +@@ -647,12 +675,12 @@ class PackageFinder(object): + link, 'unsupported archive format: %s' % ext, + ) + return +- if "binary" not in search.formats and ext == wheel_ext: ++ if "binary" not in search.formats and ext == wheel_ext and not ignore_compatibility: + self._log_skipped_link( + link, 'No binaries permitted for %s' % search.supplied, + ) + return +- if "macosx10" in link.path and ext == '.zip': ++ if "macosx10" in link.path and ext == '.zip' and not ignore_compatibility: + self._log_skipped_link(link, 'macosx10 one') + return + if ext == wheel_ext: +@@ -666,7 +694,7 @@ class PackageFinder(object): + link, 'wrong project name (not %s)' % search.supplied) + return + +- if not wheel.supported(self.valid_tags): ++ if not wheel.supported(self.valid_tags) and not ignore_compatibility: + self._log_skipped_link( + link, 'it is not compatible with this Python') + return +@@ -702,14 +730,14 @@ class PackageFinder(object): + link.filename, link.requires_python) + support_this_python = True + +- if not support_this_python: ++ if not support_this_python and not ignore_compatibility: + logger.debug("The package %s is incompatible with the python" + "version in use. 
Acceptable python versions are:%s", + link, link.requires_python) + return + logger.debug('Found link %s, version: %s', link, version) + +- return InstallationCandidate(search.supplied, version, link) ++ return InstallationCandidate(search.supplied, version, link, link.requires_python) + + def _get_page(self, link): + return HTMLPage.get_page(link, session=self.session) +diff --git a/pipenv/patched/_internal/operations/prepare.py b/pipenv/patched/_internal/operations/prepare.py +index 7740c284..b6e946d8 100644 +--- a/pipenv/patched/_internal/operations/prepare.py ++++ b/pipenv/patched/_internal/operations/prepare.py +@@ -17,7 +17,7 @@ from pip._internal.exceptions import ( + ) + from pip._internal.utils.hashes import MissingHashes + from pip._internal.utils.logging import indent_log +-from pip._internal.utils.misc import display_path, normalize_path ++from pip._internal.utils.misc import display_path, normalize_path, rmtree + from pip._internal.vcs import vcs + + logger = logging.getLogger(__name__) +@@ -123,7 +123,11 @@ class IsSDist(DistAbstraction): + "Installing build dependencies" + ) + +- self.req.run_egg_info() ++ try: ++ self.req.run_egg_info() ++ except (OSError, TypeError): ++ self.req._correct_build_location() ++ self.req.run_egg_info() + self.req.assert_source_matches_version() + + +@@ -205,16 +209,8 @@ class RequirementPreparer(object): + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` +- # package unpacked in `req.source_dir` + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): +- raise PreviousBuildDirError( +- "pip can't proceed with requirements '%s' due to a" +- " pre-existing build directory (%s). This is " +- "likely due to a previous installation that failed" +- ". pip is being responsible and not assuming it " +- "can delete this. Please delete it and try again." +- % (req, req.source_dir) +- ) ++ rmtree(req.source_dir) + req.populate_link(finder, upgrade_allowed, require_hashes) + + # We can't hit this spot and have populate_link return None. 
+diff --git a/pipenv/patched/_internal/pep425tags.py b/pipenv/patched/_internal/pep425tags.py +index 0b5c7832..bea31585 100644 +--- a/pipenv/patched/_internal/pep425tags.py ++++ b/pipenv/patched/_internal/pep425tags.py +@@ -10,7 +10,10 @@ import sysconfig + import warnings + from collections import OrderedDict + +-import pip._internal.utils.glibc ++try: ++ import pip._internal.utils.glibc ++except ImportError: ++ import pip.utils.glibc + + logger = logging.getLogger(__name__) + +diff --git a/pipenv/patched/_internal/req/req_install.py b/pipenv/patched/_internal/req/req_install.py +index 462c80aa..d039adc8 100644 +--- a/pipenv/patched/_internal/req/req_install.py ++++ b/pipenv/patched/_internal/req/req_install.py +@@ -615,7 +615,7 @@ class InstallRequirement(object): + + with indent_log(): + script = SETUPTOOLS_SHIM % self.setup_py +- base_cmd = [sys.executable, '-c', script] ++ base_cmd = [os.environ.get('PIP_PYTHON_PATH', sys.executable), '-c', script] + if self.isolated: + base_cmd += ["--no-user-cfg"] + egg_info_cmd = base_cmd + ['egg_info'] +@@ -797,7 +797,7 @@ class InstallRequirement(object): + with self.build_env: + call_subprocess( + [ +- sys.executable, ++ os.environ.get('PIP_PYTHON_PATH', sys.executable), + '-c', + SETUPTOOLS_SHIM % self.setup_py + ] + +@@ -1015,7 +1015,7 @@ class InstallRequirement(object): + + def get_install_args(self, global_options, record_filename, root, prefix, + pycompile): +- install_args = [sys.executable, "-u"] ++ install_args = [os.environ.get('PIP_PYTHON_PATH', sys.executable), "-u"] + install_args.append('-c') + install_args.append(SETUPTOOLS_SHIM % self.setup_py) + install_args += list(global_options) + \ +diff --git a/pipenv/patched/_internal/req/req_set.py b/pipenv/patched/_internal/req/req_set.py +index 2bc6b745..e552afc1 100644 +--- a/pipenv/patched/_internal/req/req_set.py ++++ b/pipenv/patched/_internal/req/req_set.py +@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) + + class RequirementSet(object): + +- def __init__(self, require_hashes=False): ++ def __init__(self, require_hashes=False, ignore_compatibility=True): + """Create a RequirementSet. + """ + +@@ -24,6 +24,7 @@ class RequirementSet(object): + self.unnamed_requirements = [] + self.successfully_downloaded = [] + self.reqs_to_cleanup = [] ++ self.ignore_compatibility = ignore_compatibility + + def __str__(self): + reqs = [req for req in self.requirements.values() +@@ -65,7 +66,7 @@ class RequirementSet(object): + # environment markers. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) +- if not wheel.supported(): ++ if not wheel.supported() and not self.ignore_compatibility: + raise InstallationError( + "%s is not a supported wheel on this platform." 
% + wheel.filename +@@ -151,7 +152,7 @@ class RequirementSet(object): + return self.requirements[name] + if name in self.requirement_aliases: + return self.requirements[self.requirement_aliases[name]] +- raise KeyError("No project with the name %r" % project_name) ++ # raise KeyError("No project with the name %r" % project_name) + + def cleanup_files(self): + """Clean up files, remove builds.""" +diff --git a/pipenv/patched/_internal/resolve.py b/pipenv/patched/_internal/resolve.py +index 8480e48c..ffc4aa7d 100644 +--- a/pipenv/patched/_internal/resolve.py ++++ b/pipenv/patched/_internal/resolve.py +@@ -35,7 +35,7 @@ class Resolver(object): + + def __init__(self, preparer, session, finder, wheel_cache, use_user_site, + ignore_dependencies, ignore_installed, ignore_requires_python, +- force_reinstall, isolated, upgrade_strategy): ++ force_reinstall, isolated, upgrade_strategy, ignore_compatibility=False): + super(Resolver, self).__init__() + assert upgrade_strategy in self._allowed_strategies + +@@ -55,7 +55,11 @@ class Resolver(object): + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python ++ self.ignore_compatibility = ignore_compatibility + self.use_user_site = use_user_site ++ self.requires_python = None ++ if self.ignore_compatibility: ++ self.ignore_requires_python = True + + self._discovered_dependencies = defaultdict(list) + +@@ -237,7 +241,7 @@ class Resolver(object): + + return abstract_dist + +- def _resolve_one(self, requirement_set, req_to_install): ++ def _resolve_one(self, requirement_set, req_to_install, ignore_requires_python=False): + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. +@@ -245,6 +249,9 @@ class Resolver(object): + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) ++ if ignore_requires_python or self.ignore_requires_python: ++ self.ignore_compatibility = True ++ + if req_to_install.constraint or req_to_install.prepared: + return [] + +@@ -260,11 +267,17 @@ class Resolver(object): + try: + check_dist_requires_python(dist) + except UnsupportedPythonVersion as err: +- if self.ignore_requires_python: ++ if self.ignore_compatibility: + logger.warning(err.args[0]) + else: + raise + ++ # A huge hack, by Kenneth Reitz. ++ try: ++ self.requires_python = check_dist_requires_python(dist, absorb=False) ++ except TypeError: ++ self.requires_python = None ++ + more_reqs = [] + + def add_req(subreq, extras_requested): +@@ -290,10 +303,14 @@ class Resolver(object): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + if not requirement_set.has_requirement(req_to_install.name): ++ available_requested = sorted( ++ set(dist.extras) & set(req_to_install.extras) ++ ) + # 'unnamed' requirements will get added here + req_to_install.is_direct = True + requirement_set.add_requirement( + req_to_install, parent_req_name=None, ++ extras_requested=available_requested, + ) + + if not self.ignore_dependencies: +@@ -317,6 +334,19 @@ class Resolver(object): + for subreq in dist.requires(available_requested): + add_req(subreq, extras_requested=available_requested) + ++ # Hack for deep-resolving extras. 
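++ # (Note, as a non-authoritative sketch: ``_DistInfoDistribution__dep_map``
++ # is the name-mangled private ``__dep_map`` of pkg_resources, mapping each
++ # extra name to its requirements; walking it directly re-collects
++ # dependencies that are only reachable through a requested extra.)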
++ for available in available_requested: ++ if hasattr(dist, '_DistInfoDistribution__dep_map'): ++ for req in dist._DistInfoDistribution__dep_map[available]: ++ req = InstallRequirement.from_req( ++ str(req), ++ req_to_install, ++ isolated=self.isolated, ++ wheel_cache=self.wheel_cache, ++ ) ++ ++ more_reqs.append(req) ++ + if not req_to_install.editable and not req_to_install.satisfied_by: + # XXX: --no-install leads this to report 'Successfully + # downloaded' for only non-editable reqs, even though we took +diff --git a/pipenv/patched/_internal/utils/misc.py b/pipenv/patched/_internal/utils/misc.py +index 3236af63..439a831d 100644 +--- a/pipenv/patched/_internal/utils/misc.py ++++ b/pipenv/patched/_internal/utils/misc.py +@@ -96,7 +96,7 @@ def get_prog(): + try: + prog = os.path.basename(sys.argv[0]) + if prog in ('__main__.py', '-c'): +- return "%s -m pip" % sys.executable ++ return "%s -m pip" % os.environ.get('PIP_PYTHON_PATH', sys.executable) + else: + return prog + except (AttributeError, TypeError, IndexError): +diff --git a/pipenv/patched/_internal/utils/packaging.py b/pipenv/patched/_internal/utils/packaging.py +index 5f9bb93d..276a9ccc 100644 +--- a/pipenv/patched/_internal/utils/packaging.py ++++ b/pipenv/patched/_internal/utils/packaging.py +@@ -28,7 +28,7 @@ def check_requires_python(requires_python): + requires_python_specifier = specifiers.SpecifierSet(requires_python) + + # We only use major.minor.micro +- python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) ++ python_version = version.parse('{0}.{1}.{2}'.format(*sys.version_info[:3])) + return python_version in requires_python_specifier + + +@@ -40,20 +40,17 @@ def get_metadata(dist): + return dist.get_metadata('PKG-INFO') + + +-def check_dist_requires_python(dist): ++def check_dist_requires_python(dist, absorb=True): + metadata = get_metadata(dist) + feed_parser = FeedParser() + feed_parser.feed(metadata) + pkg_info_dict = feed_parser.close() + requires_python = pkg_info_dict.get('Requires-Python') ++ if not absorb: ++ return requires_python + try: + if not check_requires_python(requires_python): +- raise exceptions.UnsupportedPythonVersion( +- "%s requires Python '%s' but the running Python is %s" % ( +- dist.project_name, +- requires_python, +- '.'.join(map(str, sys.version_info[:3])),) +- ) ++ return requires_python + except specifiers.InvalidSpecifier as e: + logger.warning( + "Package %s has an invalid Requires-Python entry %s - %s", +diff --git a/pipenv/patched/_internal/wheel.py b/pipenv/patched/_internal/wheel.py +index fcf9d3d3..d8aff848 100644 +--- a/pipenv/patched/_internal/wheel.py ++++ b/pipenv/patched/_internal/wheel.py +@@ -83,7 +83,7 @@ def fix_script(path): + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False +- exename = sys.executable.encode(sys.getfilesystemencoding()) ++ exename = os.environ.get('PIP_PYTHON_PATH', sys.executable).encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: +@@ -665,7 +665,7 @@ class WheelBuilder(object): + # relies on site.py to find parts of the standard library outside the + # virtualenv. 
+ return [ +- sys.executable, '-u', '-c', ++ os.environ.get('PIP_PYTHON_PATH', sys.executable), '-u', '-c', + SETUPTOOLS_SHIM % req.setup_py + ] + list(self.global_options) + From 21eb04aa42f291e77f689393ea415b5e9f61db46 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:00:52 -0400 Subject: [PATCH 05/35] Remove pip10 patch Signed-off-by: Dan Ryan --- tasks/vendoring/patches/patched/pip10.patch | 557 -------------------- 1 file changed, 557 deletions(-) delete mode 100644 tasks/vendoring/patches/patched/pip10.patch diff --git a/tasks/vendoring/patches/patched/pip10.patch b/tasks/vendoring/patches/patched/pip10.patch deleted file mode 100644 index 92e87e60d3..0000000000 --- a/tasks/vendoring/patches/patched/pip10.patch +++ /dev/null @@ -1,557 +0,0 @@ -diff --git a/pipenv/patched/pip/_internal/download.py b/pipenv/patched/pip/_internal/download.py -index 06d72019..c3501bbf 100644 ---- a/pipenv/patched/pip/_internal/download.py -+++ b/pipenv/patched/pip/_internal/download.py -@@ -19,6 +19,7 @@ from pip._vendor.lockfile import LockError - from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter - from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth - from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response -+from pip._vendor.requests.sessions import Session - from pip._vendor.requests.structures import CaseInsensitiveDict - from pip._vendor.requests.utils import get_netrc_auth - # NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is -@@ -70,7 +71,7 @@ def user_agent(): - Return a string representing the user agent. - """ - data = { -- "installer": {"name": "pip", "version": pip.__version__}, -+ "installer": {"name": "pip", "version": pipenv.patched.notpip.__version__}, - "python": platform.python_version(), - "implementation": { - "name": platform.python_implementation(), -@@ -323,7 +324,7 @@ class InsecureHTTPAdapter(HTTPAdapter): - conn.ca_certs = None - - --class PipSession(requests.Session): -+class PipSession(Session): - - timeout = None - -@@ -753,7 +754,7 @@ def _copy_dist_from_dir(link_path, location): - - # build an sdist - setup_py = 'setup.py' -- sdist_args = [sys.executable] -+ sdist_args = [os.environ.get('PIP_PYTHON_PATH', sys.executable)] - sdist_args.append('-c') - sdist_args.append(SETUPTOOLS_SHIM % setup_py) - sdist_args.append('sdist') -diff --git a/pipenv/patched/pip/_internal/index.py b/pipenv/patched/pip/_internal/index.py -index 3c3a92b7..fad6a623 100644 ---- a/pipenv/patched/pip/_internal/index.py -+++ b/pipenv/patched/pip/_internal/index.py -@@ -59,11 +59,12 @@ logger = logging.getLogger(__name__) - - class InstallationCandidate(object): - -- def __init__(self, project, version, location): -+ def __init__(self, project, version, location, requires_python=''): - self.project = project - self.version = parse_version(version) - self.location = location - self._key = (self.project, self.version, self.location) -+ self.requires_python = requires_python - - def __repr__(self): - return "".format( -@@ -168,6 +169,9 @@ class PackageFinder(object): - # The Session we'll use to make requests - self.session = session - -+ # Kenneth's Hack. 
-+ self.extra = None -+ - # The valid tags to check potential found wheel candidates against - self.valid_tags = get_supported( - versions=versions, -@@ -214,6 +218,24 @@ class PackageFinder(object): - ) - self.dependency_links.extend(links) - -+ @staticmethod -+ def get_extras_links(links): -+ requires = [] -+ extras = {} -+ -+ current_list = requires -+ -+ for link in links: -+ if not link: -+ current_list = requires -+ if link.startswith('['): -+ current_list = [] -+ extras[link[1:-1]] = current_list -+ else: -+ current_list.append(link) -+ -+ return extras -+ - @staticmethod - def _sort_locations(locations, expand_dir=False): - """ -@@ -266,7 +288,7 @@ class PackageFinder(object): - - return files, urls - -- def _candidate_sort_key(self, candidate): -+ def _candidate_sort_key(self, candidate, ignore_compatibility=True): - """ - Function used to generate link sort key for link tuples. - The greater the return value, the more preferred it is. -@@ -284,12 +306,18 @@ class PackageFinder(object): - if candidate.location.is_wheel: - # can raise InvalidWheelFilename - wheel = Wheel(candidate.location.filename) -- if not wheel.supported(self.valid_tags): -+ if not wheel.supported(self.valid_tags) and not ignore_compatibility: - raise UnsupportedWheel( - "%s is not a supported wheel for this platform. It " - "can't be sorted." % wheel.filename - ) -- pri = -(wheel.support_index_min(self.valid_tags)) -+ -+ tags = self.valid_tags if not ignore_compatibility else None -+ try: -+ pri = -(wheel.support_index_min(tags=tags)) -+ except TypeError: -+ pri = -(support_num) -+ - if wheel.build_tag is not None: - match = re.match(r'^(\d+)(.*)$', wheel.build_tag) - build_tag_groups = match.groups() -@@ -474,7 +502,7 @@ class PackageFinder(object): - dependency_versions - ) - -- def find_requirement(self, req, upgrade): -+ def find_requirement(self, req, upgrade, ignore_compatibility=False): - """Try to find a Link matching req - - Expects req, an InstallRequirement and upgrade, a boolean -@@ -484,22 +512,25 @@ class PackageFinder(object): - all_candidates = self.find_all_candidates(req.name) - - # Filter out anything which doesn't match our specifier -- compatible_versions = set( -- req.specifier.filter( -- # We turn the version object into a str here because otherwise -- # when we're debundled but setuptools isn't, Python will see -- # packaging.version.Version and -- # pkg_resources._vendor.packaging.version.Version as different -- # types. This way we'll use a str as a common data interchange -- # format. If we stop using the pkg_resources provided specifier -- # and start using our own, we can drop the cast to str(). -- [str(c.version) for c in all_candidates], -- prereleases=( -- self.allow_all_prereleases -- if self.allow_all_prereleases else None -- ), -+ if not ignore_compatibility: -+ compatible_versions = set( -+ req.specifier.filter( -+ # We turn the version object into a str here because otherwise -+ # when we're debundled but setuptools isn't, Python will see -+ # packaging.version.Version and -+ # pkg_resources._vendor.packaging.version.Version as different -+ # types. This way we'll use a str as a common data interchange -+ # format. If we stop using the pkg_resources provided specifier -+ # and start using our own, we can drop the cast to str(). 
-+ [str(c.version) for c in all_candidates], -+ prereleases=( -+ self.allow_all_prereleases -+ if self.allow_all_prereleases else None -+ ), -+ ) - ) -- ) -+ else: -+ compatible_versions = [str(c.version) for c in all_candidates] - applicable_candidates = [ - # Again, converting to str to deal with debundling. - c for c in all_candidates if str(c.version) in compatible_versions -@@ -584,7 +615,10 @@ class PackageFinder(object): - continue - seen.add(location) - -- page = self._get_page(location) -+ try: -+ page = self._get_page(location) -+ except requests.HTTPError as e: -+ page = None - if page is None: - continue - -@@ -621,7 +655,7 @@ class PackageFinder(object): - logger.debug('Skipping link %s; %s', link, reason) - self.logged_links.add(link) - -- def _link_package_versions(self, link, search): -+ def _link_package_versions(self, link, search, ignore_compatibility=True): - """Return an InstallationCandidate or None""" - version = None - if link.egg_fragment: -@@ -632,17 +666,18 @@ class PackageFinder(object): - if not ext: - self._log_skipped_link(link, 'not a file') - return -+ # Always ignore unsupported extensions even when we ignore compatibility - if ext not in SUPPORTED_EXTENSIONS: - self._log_skipped_link( - link, 'unsupported archive format: %s' % ext, - ) - return -- if "binary" not in search.formats and ext == wheel_ext: -+ if "binary" not in search.formats and ext == wheel_ext and not ignore_compatibility: - self._log_skipped_link( - link, 'No binaries permitted for %s' % search.supplied, - ) - return -- if "macosx10" in link.path and ext == '.zip': -+ if "macosx10" in link.path and ext == '.zip' and not ignore_compatibility: - self._log_skipped_link(link, 'macosx10 one') - return - if ext == wheel_ext: -@@ -656,7 +691,7 @@ class PackageFinder(object): - link, 'wrong project name (not %s)' % search.supplied) - return - -- if not wheel.supported(self.valid_tags): -+ if not wheel.supported(self.valid_tags) and not ignore_compatibility: - self._log_skipped_link( - link, 'it is not compatible with this Python') - return -@@ -692,14 +727,14 @@ class PackageFinder(object): - link.filename, link.requires_python) - support_this_python = True - -- if not support_this_python: -+ if not support_this_python and not ignore_compatibility: - logger.debug("The package %s is incompatible with the python" - "version in use. 
Acceptable python versions are:%s", - link, link.requires_python) - return - logger.debug('Found link %s, version: %s', link, version) - -- return InstallationCandidate(search.supplied, version, link) -+ return InstallationCandidate(search.supplied, version, link, link.requires_python) - - def _get_page(self, link): - return HTMLPage.get_page(link, session=self.session) -diff --git a/pipenv/patched/pip/_internal/req/req_install.py b/pipenv/patched/pip/_internal/req/req_install.py -index ddd167c6..344bce5b 100644 ---- a/pipenv/patched/pip/_internal/req/req_install.py -+++ b/pipenv/patched/pip/_internal/req/req_install.py -@@ -465,7 +465,7 @@ class InstallRequirement(object): - - with indent_log(): - script = SETUPTOOLS_SHIM % self.setup_py -- base_cmd = [sys.executable, '-c', script] -+ base_cmd = [os.environ.get('PIP_PYTHON_PATH', sys.executable), '-c', script] - if self.isolated: - base_cmd += ["--no-user-cfg"] - egg_info_cmd = base_cmd + ['egg_info'] -@@ -836,7 +836,7 @@ class InstallRequirement(object): - - def get_install_args(self, global_options, record_filename, root, prefix, - pycompile): -- install_args = [sys.executable, "-u"] -+ install_args = [os.environ.get('PIP_PYTHON_PATH', sys.executable), "-u"] - install_args.append('-c') - install_args.append(SETUPTOOLS_SHIM % self.setup_py) - install_args += list(global_options) + \ -@@ -888,7 +888,7 @@ class InstallRequirement(object): - with self.build_env: - call_subprocess( - [ -- sys.executable, -+ os.environ.get('PIP_PYTHON_PATH', sys.executable), - '-c', - SETUPTOOLS_SHIM % self.setup_py - ] + - -diff --git a/pipenv/patched/pip/_internal/req/req_set.py b/pipenv/patched/pip/_internal/req/req_set.py -index b2b55f89..50b5e2df 100644 ---- a/pipenv/patched/pip/_internal/req/req_set.py -+++ b/pipenv/patched/pip/_internal/req/req_set.py -@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) - - class RequirementSet(object): - -- def __init__(self, require_hashes=False): -+ def __init__(self, require_hashes=False, ignore_compatibility=True): - """Create a RequirementSet. - - :param wheel_cache: The pip wheel cache, for passing to -@@ -27,6 +27,7 @@ class RequirementSet(object): - self.unnamed_requirements = [] - self.successfully_downloaded = [] - self.reqs_to_cleanup = [] -+ self.ignore_compatibility = ignore_compatibility - - def __str__(self): - reqs = [req for req in self.requirements.values() -@@ -68,7 +69,7 @@ class RequirementSet(object): - # environment markers. - if install_req.link and install_req.link.is_wheel: - wheel = Wheel(install_req.link.filename) -- if not wheel.supported(): -+ if not wheel.supported() and not self.ignore_compatibility: - raise InstallationError( - "%s is not a supported wheel on this platform." 
% - wheel.filename -@@ -154,7 +155,7 @@ class RequirementSet(object): - return self.requirements[name] - if name in self.requirement_aliases: - return self.requirements[self.requirement_aliases[name]] -- raise KeyError("No project with the name %r" % project_name) -+ # raise KeyError("No project with the name %r" % project_name) - - def cleanup_files(self): - """Clean up files, remove builds.""" - -diff --git a/pipenv/patched/pip/_internal/resolve.py b/pipenv/patched/pip/_internal/resolve.py -index 3200fca8..14e3d298 100644 ---- a/pipenv/patched/pip/_internal/resolve.py -+++ b/pipenv/patched/pip/_internal/resolve.py -@@ -36,7 +36,7 @@ class Resolver(object): - - def __init__(self, preparer, session, finder, wheel_cache, use_user_site, - ignore_dependencies, ignore_installed, ignore_requires_python, -- force_reinstall, isolated, upgrade_strategy): -+ force_reinstall, isolated, upgrade_strategy, ignore_compatibility=False): - super(Resolver, self).__init__() - assert upgrade_strategy in self._allowed_strategies - -@@ -56,7 +56,11 @@ class Resolver(object): - self.ignore_dependencies = ignore_dependencies - self.ignore_installed = ignore_installed - self.ignore_requires_python = ignore_requires_python -+ self.ignore_compatibility = ignore_compatibility - self.use_user_site = use_user_site -+ self.requires_python = None -+ if self.ignore_compatibility: -+ self.ignore_requires_python = True - - self._discovered_dependencies = defaultdict(list) - -@@ -238,7 +242,7 @@ class Resolver(object): - - return abstract_dist - -- def _resolve_one(self, requirement_set, req_to_install): -+ def _resolve_one(self, requirement_set, req_to_install, ignore_requires_python=False): - """Prepare a single requirements file. - - :return: A list of additional InstallRequirements to also install. -@@ -246,6 +250,9 @@ class Resolver(object): - # Tell user what we are doing for this requirement: - # obtain (editable), skipping, processing (local url), collecting - # (remote url or package name) -+ if ignore_requires_python or self.ignore_requires_python: -+ self.ignore_compatibility = True -+ - if req_to_install.constraint or req_to_install.prepared: - return [] - -@@ -261,11 +268,17 @@ class Resolver(object): - try: - check_dist_requires_python(dist) - except UnsupportedPythonVersion as err: -- if self.ignore_requires_python: -+ if self.ignore_compatibility: - logger.warning(err.args[0]) - else: - raise - -+ # A huge hack, by Kenneth Reitz. -+ try: -+ self.requires_python = check_dist_requires_python(dist, absorb=False) -+ except TypeError: -+ self.requires_python = None -+ - more_reqs = [] - - def add_req(subreq, extras_requested): -@@ -291,10 +304,14 @@ class Resolver(object): - # We add req_to_install before its dependencies, so that we - # can refer to it when adding dependencies. - if not requirement_set.has_requirement(req_to_install.name): -+ available_requested = sorted( -+ set(dist.extras) & set(req_to_install.extras) -+ ) - # 'unnamed' requirements will get added here - req_to_install.is_direct = True - requirement_set.add_requirement( - req_to_install, parent_req_name=None, -+ extras_requested=available_requested, - ) - - if not self.ignore_dependencies: -@@ -318,6 +335,19 @@ class Resolver(object): - for subreq in dist.requires(available_requested): - add_req(subreq, extras_requested=available_requested) - -+ # Hack for deep-resolving extras. 
-+ for available in available_requested: -+ if hasattr(dist, '_DistInfoDistribution__dep_map'): -+ for req in dist._DistInfoDistribution__dep_map[available]: -+ req = InstallRequirement.from_req( -+ str(req), -+ req_to_install, -+ isolated=self.isolated, -+ wheel_cache=self.wheel_cache, -+ ) -+ -+ more_reqs.append(req) -+ - if not req_to_install.editable and not req_to_install.satisfied_by: - # XXX: --no-install leads this to report 'Successfully - # downloaded' for only non-editable reqs, even though we took -diff --git a/pipenv/patched/pip/_internal/utils/misc.py b/pipenv/patched/pip/_internal/utils/misc.py -index 9d4c9b16..d0c8e437 100644 ---- a/pipenv/patched/pip/_internal/utils/misc.py -+++ b/pipenv/patched/pip/_internal/utils/misc.py -@@ -93,7 +93,7 @@ def get_prog(): - try: - prog = os.path.basename(sys.argv[0]) - if prog in ('__main__.py', '-c'): -- return "%s -m pip" % sys.executable -+ return "%s -m pip" % os.environ.get('PIP_PYTHON_PATH', sys.executable) - else: - return prog - except (AttributeError, TypeError, IndexError): - -diff --git a/pipenv/patched/pip/_internal/utils/packaging.py b/pipenv/patched/pip/_internal/utils/packaging.py -index 5f9bb93d..83edd874 100644 ---- a/pipenv/patched/pip/_internal/utils/packaging.py -+++ b/pipenv/patched/pip/_internal/utils/packaging.py -@@ -1,6 +1,7 @@ - from __future__ import absolute_import - - import logging -+import os - import sys - from email.parser import FeedParser # type: ignore - -@@ -28,7 +29,7 @@ def check_requires_python(requires_python): - requires_python_specifier = specifiers.SpecifierSet(requires_python) - - # We only use major.minor.micro -- python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) -+ python_version = version.parse('{0}.{1}.{2}'.format(*sys.version_info[:3])) - return python_version in requires_python_specifier - - -@@ -40,20 +41,23 @@ def get_metadata(dist): - return dist.get_metadata('PKG-INFO') - - --def check_dist_requires_python(dist): -+def check_dist_requires_python(dist, absorb=True): - metadata = get_metadata(dist) - feed_parser = FeedParser() - feed_parser.feed(metadata) - pkg_info_dict = feed_parser.close() - requires_python = pkg_info_dict.get('Requires-Python') -+ if not absorb: -+ return requires_python - try: - if not check_requires_python(requires_python): -- raise exceptions.UnsupportedPythonVersion( -- "%s requires Python '%s' but the running Python is %s" % ( -- dist.project_name, -- requires_python, -- '.'.join(map(str, sys.version_info[:3])),) -- ) -+ # raise exceptions.UnsupportedPythonVersion( -+ # "%s requires Python '%s' but the running Python is %s" % ( -+ # dist.project_name, -+ # requires_python, -+ # '.'.join(map(str, sys.version_info[:3])),) -+ # ) -+ return - except specifiers.InvalidSpecifier as e: - logger.warning( - "Package %s has an invalid Requires-Python entry %s - %s", - -diff --git a/pipenv/patched/pip/_internal/wheel.py b/pipenv/patched/pip/_internal/wheel.py -index c71f17d2..3e29a49d 100644 ---- a/pipenv/patched/pip/_internal/wheel.py -+++ b/pipenv/patched/pip/_internal/wheel.py -@@ -85,7 +85,7 @@ def fix_script(path): - firstline = script.readline() - if not firstline.startswith(b'#!python'): - return False -- exename = sys.executable.encode(sys.getfilesystemencoding()) -+ exename = os.environ.get('PIP_PYTHON_PATH', sys.executable).encode(sys.getfilesystemencoding()) - firstline = b'#!' 
+ exename + os.linesep.encode("ascii") - rest = script.read() - with open(path, 'wb') as script: -@@ -655,7 +655,7 @@ class WheelBuilder(object): - # relies on site.py to find parts of the standard library outside the - # virtualenv. - return [ -- sys.executable, '-u', '-c', -+ os.environ.get('PIP_PYTHON_PATH', sys.executable), '-u', '-c', - SETUPTOOLS_SHIM % req.setup_py - ] + list(self.global_options) - -diff --git a/pipenv/patched/pip/_internal/operations/prepare.py b/pipenv/patched/pip/_internal/operations/prepare.py -index 27e3a5dd..4d120faa 100644 ---- a/pipenv/patched/pip/_internal/operations/prepare.py -+++ b/pipenv/patched/pip/_internal/operations/prepare.py -@@ -151,7 +151,11 @@ class IsSDist(DistAbstraction): - else: - self.req.build_env = NoOpBuildEnvironment(no_clean=False) - -- self.req.run_egg_info() -+ try: -+ self.req.run_egg_info() -+ except (OSError, TypeError): -+ self.req._correct_build_location() -+ self.req.run_egg_info() - self.req.assert_source_matches_version() - - -@@ -233,15 +237,15 @@ class RequirementPreparer(object): - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # package unpacked in `req.source_dir` -- if os.path.exists(os.path.join(req.source_dir, 'setup.py')): -- raise PreviousBuildDirError( -- "pip can't proceed with requirements '%s' due to a" -- " pre-existing build directory (%s). This is " -- "likely due to a previous installation that failed" -- ". pip is being responsible and not assuming it " -- "can delete this. Please delete it and try again." -- % (req, req.source_dir) -- ) -+ # if os.path.exists(os.path.join(req.source_dir, 'setup.py')): -+ # raise PreviousBuildDirError( -+ # "pip can't proceed with requirements '%s' due to a" -+ # " pre-existing build directory (%s). This is " -+ # "likely due to a previous installation that failed" -+ # ". pip is being responsible and not assuming it " -+ # "can delete this. Please delete it and try again." -+ # % (req, req.source_dir) -+ # ) - req.populate_link(finder, upgrade_allowed, require_hashes) - - # We can't hit this spot and have populate_link return None. 
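The hunk deleted just below (and its pip 18 counterpart earlier in this series) wraps `import pip._internal.utils.glibc` in a try/except fallback to the pre-10 `pip.utils.glibc` path; the piptools patch later in this series generalizes the same idea into a `do_import` helper. A self-contained sketch of that version-tolerant import, assuming nothing beyond the standard library (`resolve_pip_attr` is an illustrative name, not a helper from this codebase):

    import importlib

    def resolve_pip_attr(module_path, attr=None, old_path=None, prefix="pip"):
        # Probe pip >= 10's private layout first, then the pre-10 flat layout.
        old_path = old_path or module_path
        candidates = [
            "{0}._internal.{1}".format(prefix, module_path),
            "{0}.{1}".format(prefix, old_path),
        ]
        for name in candidates:
            try:
                module = importlib.import_module(name)
            except ImportError:
                continue
            return getattr(module, attr) if attr else module
        raise ImportError("no pip layout provides {0}".format(module_path))

    # e.g. resolve_pip_attr("utils.appdirs", "user_cache_dir") should resolve
    # on either layout, assuming some version of pip is importable at all.
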
-diff --git a/pipenv/patched/pip/_internal/pep425tags.py b/pipenv/patched/pip/_internal/pep425tags.py -index c2290ab9..bea31585 100644 ---- a/pipenv/patched/pip/_internal/pep425tags.py -+++ b/pipenv/patched/pip/_internal/pep425tags.py -@@ -10,7 +10,10 @@ import sysconfig - import warnings - from collections import OrderedDict - --import pip._internal.utils.glibc -+try: -+ import pip._internal.utils.glibc -+except ImportError: -+ import pip.utils.glibc - - logger = logging.getLogger(__name__) - From 949a671e9215ba291ef3f3ba0823dee872bef704 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:06:38 -0400 Subject: [PATCH 06/35] Update vistir vendoring Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 2 +- pipenv/vendor/vistir/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 52d52ec6ec..46538176ea 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -41,7 +41,7 @@ semver==2.8.1 shutilwhich==1.1.0 toml==0.9.4 cached-property==1.4.3 -vistir==git+https://github.com/sarugaku/vistir.git@e50a767497a059cdd727583448b77c694374fa38#egg=vistir +vistir==0.1.5 pip-shims==0.1.2 modutil==2.0.0 ptyprocess==0.6.0 diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index 94079aa5bb..1e843dee7c 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -13,7 +13,7 @@ from .path import mkdir_p, rmtree -__version__ = '0.1.5.dev0' +__version__ = '0.1.5' __all__ = [ From 7784f64f0bb0e6fa8b7f2e255c56253786050294 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:13:22 -0400 Subject: [PATCH 07/35] Fix passa dep Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 46538176ea..5ba19048c0 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -47,4 +47,4 @@ pip-shims==0.1.2 ptyprocess==0.6.0 enum34==1.1.6 yaspin==0.14.0 -passa==git+https://github.com/sarugaku/passa.git@master#egg=passa +git+https://github.com/sarugaku/passa.git@master#egg=passa From 7bc29c186229eec8196a376e933f2d936c3d9a1c Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:21:36 -0400 Subject: [PATCH 08/35] update patch for piptools Signed-off-by: Dan Ryan --- .../vendoring/patches/patched/piptools.patch | 322 +++++++----------- 1 file changed, 120 insertions(+), 202 deletions(-) diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 80b3f9e58c..1f1ab655be 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -1,23 +1,78 @@ -diff --git a/pipenv/patched/piptools/locations.py b/pipenv/patched/piptools/locations.py -index 4e6174c..75f9b49 100644 ---- a/pipenv/patched/piptools/locations.py -+++ b/pipenv/patched/piptools/locations.py -@@ -2,10 +2,13 @@ import os - from shutil import rmtree - - from .click import secho --from ._compat import user_cache_dir -+# Patch by vphilippon 2017-11-22: Use pipenv cache path. 
-+# from ._compat import user_cache_dir -+from pipenv.environments import PIPENV_CACHE_DIR - - # The user_cache_dir helper comes straight from pip itself --CACHE_DIR = user_cache_dir('pip-tools') -+# CACHE_DIR = user_cache_dir(os.path.join('pip-tools')) -+CACHE_DIR = PIPENV_CACHE_DIR - - # NOTE - # We used to store the cache dir under ~/.pip-tools, which is not the +diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py +index 674674a..feadad8 100644 +--- a/pipenv/patched/piptools/_compat/__init__.py ++++ b/pipenv/patched/piptools/_compat/__init__.py +@@ -27,4 +27,6 @@ from .pip_compat import ( + cmdoptions, + get_installed_distributions, + PyPI, ++ SafeFileCache, ++ InstallationError, + ) +diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py +index 1ba2cc7..3ea0826 100644 +--- a/pipenv/patched/piptools/_compat/pip_compat.py ++++ b/pipenv/patched/piptools/_compat/pip_compat.py +@@ -1,9 +1,10 @@ + # -*- coding=utf-8 -*- + import importlib + +-def do_import(module_path, subimport=None, old_path=None): ++def do_import(module_path, subimport=None, old_path=None, vendored_name=None): + old_path = old_path or module_path +- prefixes = ["pip._internal", "pip"] ++ prefix = vendored_name if vendored_name else "pip" ++ prefixes = ["{0}._internal".format(prefix), "{0}".format(prefix)] + paths = [module_path, old_path] + search_order = ["{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None] + package = subimport if subimport else None +@@ -18,17 +19,19 @@ def do_import(module_path, subimport=None, old_path=None): + return getattr(imported, package) + + +-InstallRequirement = do_import('req.req_install', 'InstallRequirement') +-parse_requirements = do_import('req.req_file', 'parse_requirements') +-RequirementSet = do_import('req.req_set', 'RequirementSet') +-user_cache_dir = do_import('utils.appdirs', 'user_cache_dir') +-FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH') +-is_file_url = do_import('download', 'is_file_url') +-url_to_path = do_import('download', 'url_to_path') +-PackageFinder = do_import('index', 'PackageFinder') +-FormatControl = do_import('index', 'FormatControl') +-Wheel = do_import('wheel', 'Wheel') +-Command = do_import('cli.base_command', 'Command', old_path='basecommand') +-cmdoptions = do_import('cli.cmdoptions', old_path='cmdoptions') +-get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils') +-PyPI = do_import('models.index', 'PyPI') ++InstallRequirement = do_import('req.req_install', 'InstallRequirement', vendored_name="notpip") ++parse_requirements = do_import('req.req_file', 'parse_requirements', vendored_name="notpip") ++RequirementSet = do_import('req.req_set', 'RequirementSet', vendored_name="notpip") ++user_cache_dir = do_import('utils.appdirs', 'user_cache_dir', vendored_name="notpip") ++FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH', vendored_name="notpip") ++is_file_url = do_import('download', 'is_file_url', vendored_name="notpip") ++url_to_path = do_import('download', 'url_to_path', vendored_name="notpip") ++PackageFinder = do_import('index', 'PackageFinder', vendored_name="notpip") ++FormatControl = do_import('index', 'FormatControl', vendored_name="notpip") ++Wheel = do_import('wheel', 'Wheel', vendored_name="notpip") ++Command = do_import('cli.base_command', 'Command', old_path='basecommand', vendored_name="notpip") ++cmdoptions = do_import('cli.cmdoptions', 
old_path='cmdoptions', vendored_name="notpip") ++get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils', vendored_name="notpip") ++PyPI = do_import('models.index', 'PyPI', vendored_name='notpip') ++SafeFileCache = do_import('download', 'SafeFileCache', vendored_name='notpip') ++InstallationError = do_import('exceptions', 'InstallationError', vendored_name='notpip') +diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py +index 08dabe1..480ad1e 100644 +--- a/pipenv/patched/piptools/repositories/local.py ++++ b/pipenv/patched/piptools/repositories/local.py +@@ -56,7 +56,7 @@ class LocalRequirementsRepository(BaseRepository): + if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): + project, version, _ = as_tuple(existing_pin) + return make_install_requirement( +- project, version, ireq.extras, constraint=ireq.constraint ++ project, version, ireq.extras, constraint=ireq.constraint, markers=ireq.markers + ) + else: + return self.repository.find_best_match(ireq, prereleases) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index bf69803..eb20560 100644 --- a/pipenv/patched/piptools/repositories/pypi.py @@ -434,43 +489,27 @@ index bf69803..eb20560 100644 def allow_all_wheels(self): """ diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py -index 05ec8fd..2f94f6b 100644 +index c2d323c..d5a471d 100644 --- a/pipenv/patched/piptools/resolver.py +++ b/pipenv/patched/piptools/resolver.py -@@ -8,13 +8,14 @@ from itertools import chain, count - import os - - from first import first -+from pip._vendor.packaging.markers import default_environment - from ._compat import InstallRequirement - - from . import click +@@ -13,7 +13,7 @@ from . import click from .cache import DependencyCache from .exceptions import UnsupportedConstraint from .logging import log -from .utils import (format_requirement, format_specifier, full_groupby, +from .utils import (format_requirement, format_specifier, full_groupby, dedup, simplify_markers, is_pinned_requirement, key_from_ireq, key_from_req, UNSAFE_PACKAGES) - + green = partial(click.style, fg='green') -@@ -28,6 +29,7 @@ class RequirementSummary(object): +@@ -27,6 +27,7 @@ class RequirementSummary(object): def __init__(self, ireq): self.req = ireq.req self.key = key_from_req(ireq.req) + self.markers = ireq.markers self.extras = str(sorted(ireq.extras)) self.specifier = str(ireq.specifier) - -@@ -71,7 +73,7 @@ class Resolver(object): - with self.repository.allow_all_wheels(): - return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs} - -- def resolve(self, max_rounds=10): -+ def resolve(self, max_rounds=12): - """ - Finds concrete package versions for all the given InstallRequirements - and their recursive dependencies. The end result is a flat list of -@@ -120,7 +122,7 @@ class Resolver(object): + +@@ -119,7 +120,7 @@ class Resolver(object): @staticmethod def check_constraints(constraints): for constraint in constraints: @@ -479,23 +518,9 @@ index 05ec8fd..2f94f6b 100644 msg = ('pip-compile does not support URLs as packages, unless they are editable. 
' 'Perhaps add -e option?') raise UnsupportedConstraint(msg, constraint) -@@ -147,15 +149,23 @@ class Resolver(object): - if editable_ireq: - yield editable_ireq # ignore all the other specs: the editable one is the one that counts - continue -- - ireqs = iter(ireqs) - # deepcopy the accumulator so as to not modify the self.our_constraints invariant - combined_ireq = copy.deepcopy(next(ireqs)) -- combined_ireq.comes_from = None - for ireq in ireqs: +@@ -155,6 +156,13 @@ class Resolver(object): # NOTE we may be losing some info on dropped reqs here -- combined_ireq.req.specifier &= ireq.req.specifier -+ try: -+ combined_ireq.req.specifier &= ireq.req.specifier -+ except TypeError: -+ if ireq.req.specifier._specs and not combined_ireq.req.specifier._specs: -+ combined_ireq.req.specifier._specs = ireq.req.specifier._specs + combined_ireq.req.specifier &= ireq.req.specifier combined_ireq.constraint &= ireq.constraint + if not combined_ireq.markers: + combined_ireq.markers = ireq.markers @@ -503,80 +528,59 @@ index 05ec8fd..2f94f6b 100644 + _markers = combined_ireq.markers._markers + if not isinstance(_markers[0], (tuple, list)): + combined_ireq.markers._markers = [_markers, 'and', ireq.markers._markers] ++ # Return a sorted, de-duped tuple of extras combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))) yield combined_ireq -@@ -273,6 +283,14 @@ class Resolver(object): +@@ -272,6 +280,15 @@ class Resolver(object): for dependency in self.repository.get_dependencies(ireq): yield dependency return + + # fix our malformed extras + if ireq.extras: -+ if hasattr(ireq, 'extra'): ++ if getattr(ireq, "extra", None): + if ireq.extras: + ireq.extras.extend(ireq.extra) + else: + ireq.extras = ireq.extra ++ elif not is_pinned_requirement(ireq): raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq)) - -@@ -283,14 +301,14 @@ class Resolver(object): + +@@ -282,7 +299,7 @@ class Resolver(object): if ireq not in self.dependency_cache: log.debug(' {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow') dependencies = self.repository.get_dependencies(ireq) - self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies) -+ self.dependency_cache[ireq] = sorted(format_requirement(_ireq) for _ireq in dependencies) - ++ self.dependency_cache[ireq] = sorted(set(format_requirement(ireq) for ireq in dependencies)) + # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4'] dependency_strings = self.dependency_cache[ireq] - log.debug(' {:25} requires {}'.format(format_requirement(ireq), - ', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-')) - for dependency_string in dependency_strings: -- yield InstallRequirement.from_line(dependency_string, constraint=ireq.constraint) -+ yield InstallRequirement.from_line(dependency_string, constraint=ireq.constraint) - - def reverse_dependencies(self, ireqs): - non_editable = [ireq for ireq in ireqs if not ireq.editable] -diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py -index 08dabe1..480ad1e 100644 ---- a/pipenv/patched/piptools/repositories/local.py -+++ b/pipenv/patched/piptools/repositories/local.py -@@ -56,7 +56,7 @@ class LocalRequirementsRepository(BaseRepository): - if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): - project, version, _ = as_tuple(existing_pin) - return make_install_requirement( -- project, version, ireq.extras, constraint=ireq.constraint -+ project, version, ireq.extras, 
constraint=ireq.constraint, markers=ireq.markers - ) - else: - return self.repository.find_best_match(ireq, prereleases) diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py -index fde5816..23a05f2 100644 +index a164334..6225d7e 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py -@@ -2,6 +2,7 @@ - from __future__ import (absolute_import, division, print_function, - unicode_literals) - -+import six +@@ -4,6 +4,7 @@ from __future__ import (absolute_import, division, print_function, + import os import sys ++import six from itertools import chain, groupby -@@ -11,13 +12,79 @@ from contextlib import contextmanager + from collections import OrderedDict + from contextlib import contextmanager +@@ -11,11 +12,78 @@ from contextlib import contextmanager from ._compat import InstallRequirement - - from first import first -- + + from .click import style +from pip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier +from pip._vendor.packaging.version import Version, InvalidVersion, parse as parse_version +from pip._vendor.packaging.markers import Marker, Op, Value, Variable - from .click import style - - + + UNSAFE_PACKAGES = {'setuptools', 'distribute', 'pip'} - - + + +def simplify_markers(ireq): + """simplify_markers "This code cleans up markers for a specific :class:`~InstallRequirement`" + @@ -644,10 +648,10 @@ index fde5816..23a05f2 100644 def key_from_ireq(ireq): """Get a standardized key for an InstallRequirement.""" if ireq.req is None and ireq.link is not None: -@@ -43,16 +110,51 @@ def comment(text): +@@ -41,30 +109,61 @@ def comment(text): return style(text, fg='green') - - + + -def make_install_requirement(name, version, extras, constraint=False): +def make_install_requirement(name, version, extras, markers, constraint=False): # If no extras are specified, the extras string is blank @@ -655,7 +659,7 @@ index fde5816..23a05f2 100644 if extras: # Sort extras for stability extras_string = "[{}]".format(",".join(sorted(extras))) - + - return InstallRequirement.from_line( - str('{}{}=={}'.format(name, extras_string, version)), - constraint=constraint) @@ -667,10 +671,13 @@ index fde5816..23a05f2 100644 + return InstallRequirement.from_line( + str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))), + constraint=constraint) -+ -+ + + +-def format_requirement(ireq, marker=None): +def _requirement_to_str_lowercase_name(requirement): -+ """ + """ +- Generic formatter for pretty printing InstallRequirements to the terminal +- in a less verbose way than using its `__str__` method. + Formats a packaging.requirements.Requirement with a lowercase name. + + This is simply a copy of @@ -681,7 +688,7 @@ index fde5816..23a05f2 100644 + lowercasing the entire result, which would lowercase the name, *and* other, + important stuff that should not be lowercased (such as the marker). See + this issue for more information: https://github.com/pypa/pipenv/issues/2113. 
-+ """ + """ + parts = [requirement.name.lower()] + + if requirement.extras: @@ -697,108 +704,19 @@ index fde5816..23a05f2 100644 + parts.append("; {0}".format(requirement.marker)) + + return "".join(parts) - - - def format_requirement(ireq, marker=None): -@@ -63,10 +165,10 @@ def format_requirement(ireq, marker=None): ++ ++ ++def format_requirement(ireq, marker=None): if ireq.editable: line = '-e {}'.format(ireq.link) else: - line = str(ireq.req).lower() + line = _requirement_to_str_lowercase_name(ireq.req) - + - if marker: - line = '{} ; {}'.format(line, marker) + if marker and ';' not in line: + line = '{}; {}'.format(line, marker) - - return line - -diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py -index 7e8cdf3..0a0d27d 100644 ---- a/pipenv/patched/piptools/_compat/pip_compat.py -+++ b/pipenv/patched/piptools/_compat/pip_compat.py -@@ -1,30 +1,42 @@ - # -*- coding=utf-8 -*- - import importlib --def do_import(module_path, subimport=None, old_path=None): -+ -+def do_import(module_path, subimport=None, old_path=None, vendored_name=None): - internal = 'pip._internal.{0}'.format(module_path) - old_path = old_path or module_path - pip9 = 'pip.{0}'.format(old_path) -- try: -- _tmp = importlib.import_module(internal) -- except ImportError: -- _tmp = importlib.import_module(pip9) -+ _tmp = None -+ if vendored_name: -+ vendor = '{0}._internal'.format(vendored_name) -+ vendor = '{0}.{1}'.format(vendor, old_path if old_path else module_path) -+ try: -+ _tmp = importlib.import_module(vendor) -+ except ImportError: -+ pass -+ if not _tmp: -+ try: -+ _tmp = importlib.import_module(internal) -+ except ImportError: -+ _tmp = importlib.import_module(pip9) - if subimport: - return getattr(_tmp, subimport, _tmp) - return _tmp -- + return line --InstallRequirement = do_import('req.req_install', 'InstallRequirement') --parse_requirements = do_import('req.req_file', 'parse_requirements') --RequirementSet = do_import('req.req_set', 'RequirementSet') --user_cache_dir = do_import('utils.appdirs', 'user_cache_dir') --FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH') --is_file_url = do_import('download', 'is_file_url') --url_to_path = do_import('download', 'url_to_path') --PackageFinder = do_import('index', 'PackageFinder') --FormatControl = do_import('index', 'FormatControl') --Wheel = do_import('wheel', 'Wheel') --Command = do_import('basecommand', 'Command') --cmdoptions = do_import('cmdoptions') --get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils') --PyPI = do_import('models.index', 'PyPI') -+ -+InstallRequirement = do_import('req.req_install', 'InstallRequirement', vendored_name='notpip') -+parse_requirements = do_import('req.req_file', 'parse_requirements', vendored_name='notpip') -+RequirementSet = do_import('req.req_set', 'RequirementSet', vendored_name='notpip') -+user_cache_dir = do_import('utils.appdirs', 'user_cache_dir', vendored_name='notpip') -+FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH', vendored_name='notpip') -+is_file_url = do_import('download', 'is_file_url', vendored_name='notpip') -+url_to_path = do_import('download', 'url_to_path', vendored_name='notpip') -+PackageFinder = do_import('index', 'PackageFinder', vendored_name='notpip') -+FormatControl = do_import('index', 'FormatControl', vendored_name='notpip') -+Wheel = do_import('wheel', 'Wheel', vendored_name='notpip') -+Command = do_import('basecommand', 'Command', vendored_name='notpip') -+cmdoptions = 
do_import('cmdoptions', vendored_name='notpip') -+get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils', vendored_name='notpip') -+PyPI = do_import('models.index', 'PyPI', vendored_name='notpip') -+SafeFileCache = do_import('download', 'SafeFileCache', vendored_name='notpip') -+InstallationError = do_import('exceptions', 'InstallationError', vendored_name='notpip') -diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py -index 674674a..feadad8 100644 ---- a/pipenv/patched/piptools/_compat/__init__.py -+++ b/pipenv/patched/piptools/_compat/__init__.py -@@ -27,4 +27,6 @@ from .pip_compat import ( - cmdoptions, - get_installed_distributions, - PyPI, -+ SafeFileCache, -+ InstallationError, - ) -diff --git a/pipenv/patched/pip/_vendor/__init__.py b/pipenv/patched/pip/_vendor/__init__.py -index 774f1bf3..40ce7a01 100644 ---- a/pipenv/patched/pip/_vendor/__init__.py -+++ b/pipenv/patched/pip/_vendor/__init__.py -@@ -107,3 +107,5 @@ if DEBUNDLED: - vendored("requests.packages.urllib3.util.ssl_") - vendored("requests.packages.urllib3.util.timeout") - vendored("requests.packages.urllib3.util.url") -+ -+import requests From 42eed8aefc51e03a60b4e9237492129dc5e9728d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:24:51 -0400 Subject: [PATCH 09/35] fix pip patch Signed-off-by: Dan Ryan --- tasks/vendoring/patches/patched/pip18.patch | 60 ++++++++++----------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/tasks/vendoring/patches/patched/pip18.patch b/tasks/vendoring/patches/patched/pip18.patch index bdb82b3ccf..539825c2ee 100644 --- a/tasks/vendoring/patches/patched/pip18.patch +++ b/tasks/vendoring/patches/patched/pip18.patch @@ -1,7 +1,7 @@ -diff --git a/pipenv/patched/_internal/download.py b/pipenv/patched/_internal/download.py +diff --git a/pipenv/patched/pip/_internal/download.py b/pipenv/patched/pip/_internal/download.py index 96f3b65c..3fb4ebef 100644 ---- a/pipenv/patched/_internal/download.py -+++ b/pipenv/patched/_internal/download.py +--- a/pipenv/patched/pip/_internal/download.py ++++ b/pipenv/patched/pip/_internal/download.py @@ -19,6 +19,7 @@ from pip._vendor.lockfile import LockError from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth @@ -37,10 +37,10 @@ index 96f3b65c..3fb4ebef 100644 sdist_args.append('-c') sdist_args.append(SETUPTOOLS_SHIM % setup_py) sdist_args.append('sdist') -diff --git a/pipenv/patched/_internal/index.py b/pipenv/patched/_internal/index.py +diff --git a/pipenv/patched/pip/_internal/index.py b/pipenv/patched/pip/_internal/index.py index 8c0ec82c..ad00ba04 100644 ---- a/pipenv/patched/_internal/index.py -+++ b/pipenv/patched/_internal/index.py +--- a/pipenv/patched/pip/_internal/index.py ++++ b/pipenv/patched/pip/_internal/index.py @@ -58,11 +58,12 @@ logger = logging.getLogger(__name__) class InstallationCandidate(object): @@ -192,10 +192,10 @@ index 8c0ec82c..ad00ba04 100644 def _get_page(self, link): return HTMLPage.get_page(link, session=self.session) -diff --git a/pipenv/patched/_internal/operations/prepare.py b/pipenv/patched/_internal/operations/prepare.py +diff --git a/pipenv/patched/pip/_internal/operations/prepare.py b/pipenv/patched/pip/_internal/operations/prepare.py index 7740c284..b6e946d8 100644 ---- a/pipenv/patched/_internal/operations/prepare.py -+++ b/pipenv/patched/_internal/operations/prepare.py +--- 
a/pipenv/patched/pip/_internal/operations/prepare.py ++++ b/pipenv/patched/pip/_internal/operations/prepare.py @@ -17,7 +17,7 @@ from pip._internal.exceptions import ( ) from pip._internal.utils.hashes import MissingHashes @@ -236,10 +236,10 @@ index 7740c284..b6e946d8 100644 req.populate_link(finder, upgrade_allowed, require_hashes) # We can't hit this spot and have populate_link return None. -diff --git a/pipenv/patched/_internal/pep425tags.py b/pipenv/patched/_internal/pep425tags.py +diff --git a/pipenv/patched/pip/_internal/pep425tags.py b/pipenv/patched/pip/_internal/pep425tags.py index 0b5c7832..bea31585 100644 ---- a/pipenv/patched/_internal/pep425tags.py -+++ b/pipenv/patched/_internal/pep425tags.py +--- a/pipenv/patched/pip/_internal/pep425tags.py ++++ b/pipenv/patched/pip/_internal/pep425tags.py @@ -10,7 +10,10 @@ import sysconfig import warnings from collections import OrderedDict @@ -252,10 +252,10 @@ index 0b5c7832..bea31585 100644 logger = logging.getLogger(__name__) -diff --git a/pipenv/patched/_internal/req/req_install.py b/pipenv/patched/_internal/req/req_install.py +diff --git a/pipenv/patched/pip/_internal/req/req_install.py b/pipenv/patched/pip/_internal/req/req_install.py index 462c80aa..d039adc8 100644 ---- a/pipenv/patched/_internal/req/req_install.py -+++ b/pipenv/patched/_internal/req/req_install.py +--- a/pipenv/patched/pip/_internal/req/req_install.py ++++ b/pipenv/patched/pip/_internal/req/req_install.py @@ -615,7 +615,7 @@ class InstallRequirement(object): with indent_log(): @@ -283,10 +283,10 @@ index 462c80aa..d039adc8 100644 install_args.append('-c') install_args.append(SETUPTOOLS_SHIM % self.setup_py) install_args += list(global_options) + \ -diff --git a/pipenv/patched/_internal/req/req_set.py b/pipenv/patched/_internal/req/req_set.py +diff --git a/pipenv/patched/pip/_internal/req/req_set.py b/pipenv/patched/pip/_internal/req/req_set.py index 2bc6b745..e552afc1 100644 ---- a/pipenv/patched/_internal/req/req_set.py -+++ b/pipenv/patched/_internal/req/req_set.py +--- a/pipenv/patched/pip/_internal/req/req_set.py ++++ b/pipenv/patched/pip/_internal/req/req_set.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) class RequirementSet(object): @@ -322,10 +322,10 @@ index 2bc6b745..e552afc1 100644 def cleanup_files(self): """Clean up files, remove builds.""" -diff --git a/pipenv/patched/_internal/resolve.py b/pipenv/patched/_internal/resolve.py +diff --git a/pipenv/patched/pip/_internal/resolve.py b/pipenv/patched/pip/_internal/resolve.py index 8480e48c..ffc4aa7d 100644 ---- a/pipenv/patched/_internal/resolve.py -+++ b/pipenv/patched/_internal/resolve.py +--- a/pipenv/patched/pip/_internal/resolve.py ++++ b/pipenv/patched/pip/_internal/resolve.py @@ -35,7 +35,7 @@ class Resolver(object): def __init__(self, preparer, session, finder, wheel_cache, use_user_site, @@ -420,10 +420,10 @@ index 8480e48c..ffc4aa7d 100644 if not req_to_install.editable and not req_to_install.satisfied_by: # XXX: --no-install leads this to report 'Successfully # downloaded' for only non-editable reqs, even though we took -diff --git a/pipenv/patched/_internal/utils/misc.py b/pipenv/patched/_internal/utils/misc.py +diff --git a/pipenv/patched/pip/_internal/utils/misc.py b/pipenv/patched/pip/_internal/utils/misc.py index 3236af63..439a831d 100644 ---- a/pipenv/patched/_internal/utils/misc.py -+++ b/pipenv/patched/_internal/utils/misc.py +--- a/pipenv/patched/pip/_internal/utils/misc.py ++++ b/pipenv/patched/pip/_internal/utils/misc.py @@ -96,7 +96,7 @@ def get_prog(): try: prog = 
os.path.basename(sys.argv[0]) @@ -433,10 +433,10 @@ index 3236af63..439a831d 100644 else: return prog except (AttributeError, TypeError, IndexError): -diff --git a/pipenv/patched/_internal/utils/packaging.py b/pipenv/patched/_internal/utils/packaging.py +diff --git a/pipenv/patched/pip/_internal/utils/packaging.py b/pipenv/patched/pip/_internal/utils/packaging.py index 5f9bb93d..276a9ccc 100644 ---- a/pipenv/patched/_internal/utils/packaging.py -+++ b/pipenv/patched/_internal/utils/packaging.py +--- a/pipenv/patched/pip/_internal/utils/packaging.py ++++ b/pipenv/patched/pip/_internal/utils/packaging.py @@ -28,7 +28,7 @@ def check_requires_python(requires_python): requires_python_specifier = specifiers.SpecifierSet(requires_python) @@ -471,10 +471,10 @@ index 5f9bb93d..276a9ccc 100644 except specifiers.InvalidSpecifier as e: logger.warning( "Package %s has an invalid Requires-Python entry %s - %s", -diff --git a/pipenv/patched/_internal/wheel.py b/pipenv/patched/_internal/wheel.py +diff --git a/pipenv/patched/pip/_internal/wheel.py b/pipenv/patched/pip/_internal/wheel.py index fcf9d3d3..d8aff848 100644 ---- a/pipenv/patched/_internal/wheel.py -+++ b/pipenv/patched/_internal/wheel.py +--- a/pipenv/patched/pip/_internal/wheel.py ++++ b/pipenv/patched/pip/_internal/wheel.py @@ -83,7 +83,7 @@ def fix_script(path): firstline = script.readline() if not firstline.startswith(b'#!python'): From b2957af35ce5aff3f774dad1a1d413f2237d7106 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:29:56 -0400 Subject: [PATCH 10/35] Update passa Signed-off-by: Dan Ryan --- pipenv/vendor/passa/LICENSE | 13 + pipenv/vendor/passa/__init__.py | 2 +- pipenv/vendor/passa/_pip.py | 315 ------------------ pipenv/vendor/passa/_pip_shims.py | 61 ---- pipenv/vendor/passa/caches.py | 214 ------------ pipenv/vendor/passa/candidates.py | 81 ----- pipenv/vendor/passa/cli.py | 115 ------- pipenv/vendor/passa/cli/_base.py | 23 +- pipenv/vendor/passa/cli/add.py | 4 +- pipenv/vendor/passa/cli/clean.py | 4 +- pipenv/vendor/passa/cli/install.py | 4 +- pipenv/vendor/passa/cli/lock.py | 2 +- pipenv/vendor/passa/cli/remove.py | 4 +- pipenv/vendor/passa/cli/sync.py | 4 +- pipenv/vendor/passa/cli/upgrade.py | 7 +- pipenv/vendor/passa/dependencies.py | 253 -------------- pipenv/vendor/passa/dependencies_pip.py | 187 ----------- pipenv/vendor/passa/hashes.py | 61 ---- pipenv/vendor/passa/internals/candidates.py | 20 +- pipenv/vendor/passa/internals/dependencies.py | 12 +- pipenv/vendor/passa/internals/lockers.py | 17 +- .../vendor/passa/{ => internals}/projects.py | 2 +- pipenv/vendor/passa/internals/providers.py | 30 +- .../vendor/passa/{ => internals}/reporters.py | 0 pipenv/vendor/passa/internals/utils.py | 12 + pipenv/vendor/passa/lockers.py | 182 ---------- pipenv/vendor/passa/locking.py | 105 ------ pipenv/vendor/passa/markers.py | 228 ------------- pipenv/vendor/passa/metadata.py | 169 ---------- pipenv/vendor/passa/operations/_utils.py | 0 pipenv/vendor/passa/operations/lock.py | 2 +- pipenv/vendor/passa/providers.py | 167 ---------- pipenv/vendor/passa/reporters/__init__.py | 31 -- pipenv/vendor/passa/reporters/base.py | 52 --- pipenv/vendor/passa/reporters/stdout.py | 106 ------ pipenv/vendor/passa/synchronizers.py | 211 ------------ pipenv/vendor/passa/traces.py | 40 --- pipenv/vendor/passa/utils.py | 97 ------ pipenv/vendor/passa/vcs.py | 37 -- 39 files changed, 113 insertions(+), 2761 deletions(-) create mode 100644 pipenv/vendor/passa/LICENSE delete mode 100644 pipenv/vendor/passa/_pip.py delete mode 100644 
pipenv/vendor/passa/_pip_shims.py delete mode 100644 pipenv/vendor/passa/caches.py delete mode 100644 pipenv/vendor/passa/candidates.py delete mode 100644 pipenv/vendor/passa/cli.py delete mode 100644 pipenv/vendor/passa/dependencies.py delete mode 100644 pipenv/vendor/passa/dependencies_pip.py delete mode 100644 pipenv/vendor/passa/hashes.py rename pipenv/vendor/passa/{ => internals}/projects.py (99%) rename pipenv/vendor/passa/{ => internals}/reporters.py (100%) delete mode 100644 pipenv/vendor/passa/lockers.py delete mode 100644 pipenv/vendor/passa/locking.py delete mode 100644 pipenv/vendor/passa/markers.py delete mode 100644 pipenv/vendor/passa/metadata.py delete mode 100644 pipenv/vendor/passa/operations/_utils.py delete mode 100644 pipenv/vendor/passa/providers.py delete mode 100644 pipenv/vendor/passa/reporters/__init__.py delete mode 100644 pipenv/vendor/passa/reporters/base.py delete mode 100644 pipenv/vendor/passa/reporters/stdout.py delete mode 100644 pipenv/vendor/passa/synchronizers.py delete mode 100644 pipenv/vendor/passa/traces.py delete mode 100644 pipenv/vendor/passa/utils.py delete mode 100644 pipenv/vendor/passa/vcs.py diff --git a/pipenv/vendor/passa/LICENSE b/pipenv/vendor/passa/LICENSE new file mode 100644 index 0000000000..e1a278e7b3 --- /dev/null +++ b/pipenv/vendor/passa/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Dan Ryan + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
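Patch 10 folds passa's top-level modules into a `passa.internals` package; among the files deleted below is `_pip.py`, whose `_convert_hashes` helper maps Pipfile.lock's "algo:value" hash strings onto the `{algorithm: [values]}` mapping that pip's InstallRequirement options expect. A standalone restatement of that conversion, with a quick usage check (the digests are made-up sample values):

    def convert_hashes(values):
        # Pipfile.lock stores hashes as "algo:value"; pip wants {algo: [values]}.
        hashes = {}
        for value in values or ():
            try:
                name, value = value.split(":", 1)
            except ValueError:
                name = "sha256"  # bare digests are treated as SHA-256
            hashes.setdefault(name, []).append(value)
        return hashes

    assert convert_hashes(["sha256:deadbeef", "md5:abc123", "cafef00d"]) == {
        "sha256": ["deadbeef", "cafef00d"],
        "md5": ["abc123"],
    }
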
diff --git a/pipenv/vendor/passa/__init__.py b/pipenv/vendor/passa/__init__.py index 6f92267d8a..ea633f0a9c 100644 --- a/pipenv/vendor/passa/__init__.py +++ b/pipenv/vendor/passa/__init__.py @@ -4,4 +4,4 @@ '__version__' ] -__version__ = '0.3.0' +__version__ = '0.3.1.dev0' diff --git a/pipenv/vendor/passa/_pip.py b/pipenv/vendor/passa/_pip.py deleted file mode 100644 index 5cf1cea8a9..0000000000 --- a/pipenv/vendor/passa/_pip.py +++ /dev/null @@ -1,315 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, unicode_literals - -import contextlib -import distutils.log -import os - -import setuptools.dist - -import distlib.scripts -import distlib.wheel -import pip_shims -import six -import vistir - -from ._pip_shims import VCS_SUPPORT, build_wheel as _build_wheel, unpack_url -from .caches import CACHE_DIR -from .utils import filter_sources - - -@vistir.path.ensure_mkdir_p(mode=0o775) -def _get_src_dir(): - src = os.environ.get("PIP_SRC") - if src: - return src - virtual_env = os.environ.get("VIRTUAL_ENV") - if virtual_env: - return os.path.join(virtual_env, "src") - temp_src = vistir.path.create_tracked_tempdir(prefix='passa-src') - return temp_src - - -def _prepare_wheel_building_kwargs(ireq): - download_dir = os.path.join(CACHE_DIR, "pkgs") - vistir.mkdir_p(download_dir) - - wheel_download_dir = os.path.join(CACHE_DIR, "wheels") - vistir.mkdir_p(wheel_download_dir) - - if ireq.source_dir is None: - src_dir = _get_src_dir() - else: - src_dir = ireq.source_dir - - # This logic matches pip's behavior, although I don't fully understand the - # intention. I guess the idea is to build editables in-place, otherwise out - # of the source tree? - if ireq.editable: - build_dir = src_dir - else: - build_dir = vistir.path.create_tracked_tempdir(prefix="passa-build") - - return { - "build_dir": build_dir, - "src_dir": src_dir, - "download_dir": download_dir, - "wheel_download_dir": wheel_download_dir, - } - - -def _get_pip_index_urls(sources): - index_urls = [] - trusted_hosts = [] - for source in sources: - url = source.get("url") - if not url: - continue - index_urls.append(url) - if source.get("verify_ssl", True): - continue - host = six.moves.urllib.parse.urlparse(source["url"]).hostname - trusted_hosts.append(host) - return index_urls, trusted_hosts - - -class _PipCommand(pip_shims.Command): - name = "PipCommand" - - -def _get_pip_session(trusted_hosts): - cmd = _PipCommand() - options, _ = cmd.parser.parse_args([]) - options.cache_dir = CACHE_DIR - options.trusted_hosts = trusted_hosts - session = cmd._build_session(options) - return session - - -def _get_finder(sources): - index_urls, trusted_hosts = _get_pip_index_urls(sources) - session = _get_pip_session(trusted_hosts) - finder = pip_shims.PackageFinder( - find_links=[], - index_urls=index_urls, - trusted_hosts=trusted_hosts, - allow_all_prereleases=True, - session=session, - ) - return finder - - -def _get_wheel_cache(): - format_control = pip_shims.FormatControl(set(), set()) - wheel_cache = pip_shims.WheelCache(CACHE_DIR, format_control) - return wheel_cache - - -def _convert_hashes(values): - """Convert Pipfile.lock hash lines into InstallRequirement option format. - - The option format uses a str-list mapping. Keys are hash algorithms, and - the list contains all values of that algorithm. 
- """ - hashes = {} - if not values: - return hashes - for value in values: - try: - name, value = value.split(":", 1) - except ValueError: - name = "sha256" - if name not in hashes: - hashes[name] = [] - hashes[name].append(value) - return hashes - - -def build_wheel(ireq, sources, hashes=None): - """Build a wheel file for the InstallRequirement object. - - An artifact is downloaded (or read from cache). If the artifact is not a - wheel, build one out of it. The dynamically built wheel is ephemeral; do - not depend on its existence after the returned wheel goes out of scope. - - If `hashes` is truthy, it is assumed to be a list of hashes (as formatted - in Pipfile.lock) to be checked against the download. - - Returns a `distlib.wheel.Wheel` instance. Raises a `RuntimeError` if the - wheel cannot be built. - """ - kwargs = _prepare_wheel_building_kwargs(ireq) - finder = _get_finder(sources) - - # Not for upgrade, hash not required. Hashes are not required here even - # when we provide them, because pip skips local wheel cache if we set it - # to True. Hashes are checked later if we need to download the file. - ireq.populate_link(finder, False, False) - - # Ensure ireq.source_dir is set. - # This is intentionally set to build_dir, not src_dir. Comments from pip: - # [...] if filesystem packages are not marked editable in a req, a non - # deterministic error occurs when the script attempts to unpack the - # build directory. - # Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq - # is editable, build_dir is actually src_dir, making the build in-place. - ireq.ensure_has_source_dir(kwargs["build_dir"]) - - # Ensure the remote artifact is downloaded locally. For wheels, it is - # enough to just download because we'll use them directly. For an sdist, - # we need to unpack so we can build it. - if not pip_shims.is_file_url(ireq.link): - if ireq.is_wheel: - only_download = True - download_dir = kwargs["wheel_download_dir"] - else: - only_download = False - download_dir = kwargs["download_dir"] - ireq.options["hashes"] = _convert_hashes(hashes) - unpack_url( - ireq.link, ireq.source_dir, download_dir, - only_download=only_download, session=finder.session, - hashes=ireq.hashes(False), progress_bar=False, - ) - - if ireq.is_wheel: - # If this is a wheel, use the downloaded thing. - output_dir = kwargs["wheel_download_dir"] - wheel_path = os.path.join(output_dir, ireq.link.filename) - else: - # Othereise we need to build an ephemeral wheel. 
- wheel_path = _build_wheel( - ireq, vistir.path.create_tracked_tempdir(prefix="ephem"), - finder, _get_wheel_cache(), kwargs, - ) - if wheel_path is None or not os.path.exists(wheel_path): - raise RuntimeError("failed to build wheel from {}".format(ireq)) - return distlib.wheel.Wheel(wheel_path) - - -def _obtrain_ref(vcs_obj, src_dir, name, rev=None): - target_dir = os.path.join(src_dir, name) - target_rev = vcs_obj.make_rev_options(rev) - if not os.path.exists(target_dir): - vcs_obj.obtain(target_dir) - if (not vcs_obj.is_commit_id_equal(target_dir, rev) and - not vcs_obj.is_commit_id_equal(target_dir, target_rev)): - vcs_obj.update(target_dir, target_rev) - return vcs_obj.get_revision(target_dir) - - -def get_vcs_ref(requirement): - backend = VCS_SUPPORT._registry.get(requirement.vcs) - vcs = backend(url=requirement.req.vcs_uri) - src = _get_src_dir() - name = requirement.normalized_name - ref = _obtrain_ref(vcs, src, name, rev=requirement.req.ref) - return ref - - -def find_installation_candidates(ireq, sources): - finder = _get_finder(sources) - return finder.find_all_candidates(ireq.name) - - -class RequirementUninstallation(object): - """A context manager to remove a package for the inner block. - - This uses `UninstallPathSet` to control the workflow. If the inner block - exits correctly, the uninstallation is committed, otherwise rolled back. - """ - def __init__(self, ireq, auto_confirm, verbose): - self.ireq = ireq - self.pathset = None - self.auto_confirm = auto_confirm - self.verbose = verbose - - def __enter__(self): - self.pathset = self.ireq.uninstall( - auto_confirm=self.auto_confirm, - verbose=self.verbose, - ) - return self.pathset - - def __exit__(self, exc_type, exc_value, traceback): - if self.pathset is None: - return - if exc_type is None: - self.pathset.commit() - else: - self.pathset.rollback() - - -def uninstall_requirement(ireq, **kwargs): - return RequirementUninstallation(ireq, **kwargs) - - -@contextlib.contextmanager -def _suppress_distutils_logs(): - """Hack to hide noise generated by `setup.py develop`. - - There isn't a good way to suppress them now, so let's monky-patch. - See https://bugs.python.org/issue25392. - """ - f = distutils.log.Log._log - - def _log(log, level, msg, args): - if level >= distutils.log.ERROR: - f(log, level, msg, args) - - distutils.log.Log._log = _log - yield - distutils.log.Log._log = f - - -class NoopInstaller(object): - """An installer. - - This class is not designed to be instantiated by itself, but used as a - common interface for subclassing. - - An installer has two methods, `prepare()` and `install()`. Neither takes - arguments, and should be called in that order to prepare an installation - operation, and to actually install things. - """ - def prepare(self): - pass - - def install(self): - pass - - -class EditableInstaller(NoopInstaller): - """Installer to handle editable. - """ - def __init__(self, requirement): - ireq = requirement.as_ireq() - self.working_directory = ireq.setup_py_dir - self.setup_py = ireq.setup_py - - def install(self): - with vistir.cd(self.working_directory), _suppress_distutils_logs(): - # Access from Setuptools to ensure things are patched correctly. - setuptools.dist.distutils.core.run_setup( - self.setup_py, ["develop", "--no-deps"], - ) - - -class WheelInstaller(NoopInstaller): - """Installer by building a wheel. - - The wheel is built during `prepare()`, and installed in `install()`. 
- """ - def __init__(self, requirement, sources, paths): - self.ireq = requirement.as_ireq() - self.sources = filter_sources(requirement, sources) - self.hashes = requirement.hashes or None - self.paths = paths - self.wheel = None - - def prepare(self): - self.wheel = build_wheel(self.ireq, self.sources, self.hashes) - - def install(self): - self.wheel.install(self.paths, distlib.scripts.ScriptMaker(None, None)) diff --git a/pipenv/vendor/passa/_pip_shims.py b/pipenv/vendor/passa/_pip_shims.py deleted file mode 100644 index b2c7b6ea31..0000000000 --- a/pipenv/vendor/passa/_pip_shims.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding=utf-8 -*- - -"""Shims to make the pip interface more consistent accross versions. - -There are currently two members: - -* VCS_SUPPORT is an instance of VcsSupport. -* build_wheel abstracts the process to build a wheel out of a bunch parameters. -* unpack_url wraps the actual function in pip to accept modern parameters. -""" - -from __future__ import absolute_import, unicode_literals - -import pip_shims - - -def _build_wheel_pre10(ireq, output_dir, finder, wheel_cache, kwargs): - kwargs.update({"wheel_cache": wheel_cache, "session": finder.session}) - reqset = pip_shims.RequirementSet(**kwargs) - builder = pip_shims.WheelBuilder(reqset, finder) - return builder._build_one(ireq, output_dir) - - -def _build_wheel_modern(ireq, output_dir, finder, wheel_cache, kwargs): - """Build a wheel. - - * ireq: The InstallRequirement object to build - * output_dir: The directory to build the wheel in. - * finder: pip's internal Finder object to find the source out of ireq. - * kwargs: Various keyword arguments from `_prepare_wheel_building_kwargs`. - """ - kwargs.update({"progress_bar": "off", "build_isolation": False}) - with pip_shims.RequirementTracker() as req_tracker: - if req_tracker: - kwargs["req_tracker"] = req_tracker - preparer = pip_shims.RequirementPreparer(**kwargs) - builder = pip_shims.WheelBuilder(finder, preparer, wheel_cache) - return builder._build_one(ireq, output_dir) - - -def _unpack_url_pre10(*args, **kwargs): - """Shim for unpack_url in various pip versions. - - pip before 10.0 does not accept `progress_bar` here. Simply drop it. - """ - kwargs.pop("progress_bar", None) - return pip_shims.unpack_url(*args, **kwargs) - - -PIP_VERSION = pip_shims.utils._parse(pip_shims.pip_version) -VERSION_10 = pip_shims.utils._parse("10") - - -VCS_SUPPORT = pip_shims.VcsSupport() - -build_wheel = _build_wheel_modern -unpack_url = pip_shims.unpack_url - -if PIP_VERSION < VERSION_10: - build_wheel = _build_wheel_pre10 - unpack_url = _unpack_url_pre10 diff --git a/pipenv/vendor/passa/caches.py b/pipenv/vendor/passa/caches.py deleted file mode 100644 index 6d3131fa53..0000000000 --- a/pipenv/vendor/passa/caches.py +++ /dev/null @@ -1,214 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, unicode_literals - -import copy -import hashlib -import json -import os -import sys - -import appdirs -import pip_shims -import requests -import vistir - -from ._pip_shims import VCS_SUPPORT -from .utils import get_pinned_version - - -CACHE_DIR = os.environ.get("PASSA_CACHE_DIR", appdirs.user_cache_dir("passa")) - - -class HashCache(pip_shims.SafeFileCache): - """Caches hashes of PyPI artifacts so we do not need to re-download them. - - Hashes are only cached when the URL appears to contain a hash in it and the - cache key includes the hash value returned from the server). This ought to - avoid ssues where the location on the server changes. 
- """ - def __init__(self, *args, **kwargs): - session = kwargs.pop('session', requests.session()) - self.session = session - kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache')) - super(HashCache, self).__init__(*args, **kwargs) - - def get_hash(self, location): - # If there is no location hash (i.e., md5, sha256, etc.), we don't want - # to store it. - hash_value = None - orig_scheme = location.scheme - new_location = copy.deepcopy(location) - if orig_scheme in VCS_SUPPORT.all_schemes: - new_location.url = new_location.url.split("+", 1)[-1] - can_hash = new_location.hash - if can_hash: - # hash url WITH fragment - hash_value = self.get(new_location.url) - if not hash_value: - hash_value = self._get_file_hash(new_location) - hash_value = hash_value.encode('utf8') - if can_hash: - self.set(new_location.url, hash_value) - return hash_value.decode('utf8') - - def _get_file_hash(self, location): - h = hashlib.new(pip_shims.FAVORITE_HASH) - with vistir.open_file(location, self.session) as fp: - for chunk in iter(lambda: fp.read(8096), b""): - h.update(chunk) - return ":".join([h.name, h.hexdigest()]) - - -# pip-tools's dependency cache implementation. -class CorruptCacheError(Exception): - def __init__(self, path): - self.path = path - - def __str__(self): - lines = [ - 'The dependency cache seems to have been corrupted.', - 'Inspect, or delete, the following file:', - ' {}'.format(self.path), - ] - return os.linesep.join(lines) - - -def _key_from_req(req): - """Get an all-lowercase version of the requirement's name.""" - if hasattr(req, 'key'): - # from pkg_resources, such as installed dists for pip-sync - key = req.key - else: - # from packaging, such as install requirements from requirements.txt - key = req.name - - key = key.replace('_', '-').lower() - return key - - -def _read_cache_file(cache_file_path): - with open(cache_file_path, 'r') as cache_file: - try: - doc = json.load(cache_file) - except ValueError: - raise CorruptCacheError(cache_file_path) - - # Check version and load the contents - assert doc['__format__'] == 1, 'Unknown cache file format' - return doc['dependencies'] - - -class _JSONCache(object): - """A persistent cache backed by a JSON file. - - The cache file is written to the appropriate user cache dir for the - current platform, i.e. - - ~/.cache/pip-tools/depcache-pyX.Y.json - - Where X.Y indicates the Python version. - """ - filename_format = None - - def __init__(self, cache_dir=CACHE_DIR): - vistir.mkdir_p(cache_dir) - python_version = ".".join(str(digit) for digit in sys.version_info[:2]) - cache_filename = self.filename_format.format( - python_version=python_version, - ) - self._cache_file = os.path.join(cache_dir, cache_filename) - self._cache = None - - @property - def cache(self): - """The dictionary that is the actual in-memory cache. - - This property lazily loads the cache from disk. - """ - if self._cache is None: - self.read_cache() - return self._cache - - def as_cache_key(self, ireq): - """Given a requirement, return its cache key. - - This behavior is a little weird in order to allow backwards - compatibility with cache files. 
For a requirement without extras, this
-        will return, for example::
-
-            ("ipython", "2.1.0")
-
-        For a requirement with extras, the extras will be comma-separated and
-        appended to the version, inside brackets, like so::
-
-            ("ipython", "2.1.0[nbconvert,notebook]")
-        """
-        extras = tuple(sorted(ireq.extras))
-        if not extras:
-            extras_string = ""
-        else:
-            extras_string = "[{}]".format(",".join(extras))
-        name = _key_from_req(ireq.req)
-        version = get_pinned_version(ireq)
-        return name, "{}{}".format(version, extras_string)
-
-    def read_cache(self):
-        """Reads the cached contents into memory.
-        """
-        if os.path.exists(self._cache_file):
-            self._cache = _read_cache_file(self._cache_file)
-        else:
-            self._cache = {}
-
-    def write_cache(self):
-        """Writes the cache to disk as JSON.
-        """
-        doc = {
-            '__format__': 1,
-            'dependencies': self._cache,
-        }
-        with open(self._cache_file, 'w') as f:
-            json.dump(doc, f, sort_keys=True)
-
-    def clear(self):
-        self._cache = {}
-        self.write_cache()
-
-    def __contains__(self, ireq):
-        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
-        return pkgversion_and_extras in self.cache.get(pkgname, {})
-
-    def __getitem__(self, ireq):
-        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
-        return self.cache[pkgname][pkgversion_and_extras]
-
-    def __setitem__(self, ireq, values):
-        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
-        self.cache.setdefault(pkgname, {})
-        self.cache[pkgname][pkgversion_and_extras] = values
-        self.write_cache()
-
-    def __delitem__(self, ireq):
-        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
-        try:
-            del self.cache[pkgname][pkgversion_and_extras]
-        except KeyError:
-            return
-        self.write_cache()
-
-    def get(self, ireq, default=None):
-        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
-        return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default)
-
-
-class DependencyCache(_JSONCache):
-    """Cache the dependencies of candidates.
-    """
-    filename_format = "depcache-py{python_version}.json"
-
-
-class RequiresPythonCache(_JSONCache):
-    """Cache a candidate's Requires-Python information.
-    """
-    filename_format = "pyreqcache-py{python_version}.json"
diff --git a/pipenv/vendor/passa/candidates.py b/pipenv/vendor/passa/candidates.py
deleted file mode 100644
index d5390d65ee..0000000000
--- a/pipenv/vendor/passa/candidates.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding=utf-8 -*-
-
-from __future__ import absolute_import, unicode_literals
-
-import os
-import sys
-
-import packaging.specifiers
-import packaging.version
-import requirementslib
-
-from ._pip import find_installation_candidates, get_vcs_ref
-
-
-def _filter_matching_python_requirement(candidates, python_version):
-    for c in candidates:
-        try:
-            requires_python = c.requires_python
-        except AttributeError:
-            requires_python = c.location.requires_python
-        if python_version and requires_python:
-            # Old specifications had people setting this to single digits
-            # which is effectively the same as '>=digit, 1}
-        if name not in names and not (names & routes):
-            continue
-        yield (
-            requirement.normalized_name,
-            next(iter(requirement.as_pipfile().values()))
-        )
-
-
-class AbstractLocker(object):
-    """Helper class to produce a new lock file for a project.
-
-    This is not intended for instantiation. You should use one of its concrete
-    subclasses instead. The class contains logic to:
-
-    * Prepare a project for locking
-    * Perform the actual resolver invocation
-    * Convert resolver output into lock file format
-    * Update the project to have the new lock file
-    """
-    def __init__(self, project):
-        self.project = project
-        self.default_requirements = _get_requirements(
-            project.pipfile, "packages",
-        )
-        self.develop_requirements = _get_requirements(
-            project.pipfile, "dev-packages",
-        )
-
-        # This comprehension dance ensures we merge packages from both
-        # sections, and definitions in the default section win.
-        self.requirements = {k: r for k, r in itertools.chain(
-            self.develop_requirements.items(),
-            self.default_requirements.items(),
-        )}.values()
-
-        self.sources = [s._data.copy() for s in project.pipfile.sources]
-        self.allow_prereleases = bool(
-            project.pipfile.get("pipenv", {}).get("allow_prereleases", False),
-        )
-
-    def __repr__(self):
-        return "<{0} @ {1!r}>".format(type(self).__name__, self.project.root)
-
-    def get_provider(self):
-        raise NotImplementedError
-
-    def get_reporter(self):
-        # TODO: Build SpinnerReporter, and use this only in verbose mode.
-        return StdOutReporter(self.requirements)
-
-    def lock(self):
-        """Lock specified (abstract) requirements into (concrete) candidates.
-
-        The locking procedure consists of four stages:
-
-        * Resolve versions and dependency graph (powered by ResolveLib).
-        * Walk the graph to determine "why" each candidate came to be, i.e.
-          what top-level requirements result in a given candidate.
-        * Populate hashes for resolved candidates.
-        * Populate markers based on dependency specifications of each
-          candidate, and the dependency graph.
-        """
-        provider = self.get_provider()
-        reporter = self.get_reporter()
-        resolver = resolvelib.Resolver(provider, reporter)
-
-        with vistir.cd(self.project.root):
-            state = resolver.resolve(self.requirements)
-
-        traces = trace_graph(state.graph)
-
-        hash_cache = HashCache()
-        for r in state.mapping.values():
-            if not r.hashes:
-                r.hashes = get_hashes(hash_cache, r)
-
-        set_metadata(
-            state.mapping, traces,
-            provider.fetched_dependencies, provider.requires_pythons,
-        )
-
-        lockfile = plette.Lockfile.with_meta_from(self.project.pipfile)
-        lockfile["default"] = dict(_iter_derived_entries(
-            state, traces, self.default_requirements,
-        ))
-        lockfile["develop"] = dict(_iter_derived_entries(
-            state, traces, self.develop_requirements,
-        ))
-        self.project.lockfile = lockfile
-
-
-class BasicLocker(AbstractLocker):
-    """Basic concrete locker.
-
-    This takes a project, generates a lock file from its Pipfile, and sets
-    the lock file property on the project.
-    """
-    def get_provider(self):
-        return BasicProvider(
-            self.requirements, self.sources, self.allow_prereleases,
-        )
-
-
-class PinReuseLocker(AbstractLocker):
-    """A specialized locker to handle re-locking based on existing pins.
-
-    See :class:`passa.providers.PinReuseProvider` for more information.
-    """
-    def __init__(self, project):
-        super(PinReuseLocker, self).__init__(project)
-        pins = _get_requirements(project.lockfile, "develop")
-        pins.update(_get_requirements(project.lockfile, "default"))
-        for pin in pins.values():
-            pin.markers = None
-        self.preferred_pins = pins
-
-    def get_provider(self):
-        return PinReuseProvider(
-            self.preferred_pins,
-            self.requirements, self.sources, self.allow_prereleases,
-        )
-
-
-class EagerUpgradeLocker(PinReuseLocker):
-    """A specialized locker to handle the "eager" upgrade strategy.
-
-    See :class:`passa.providers.EagerUpgradeProvider` for more
-    information.
-    """
-    def __init__(self, tracked_names, *args, **kwargs):
-        super(EagerUpgradeLocker, self).__init__(*args, **kwargs)
-        self.tracked_names = tracked_names
-
-    def get_provider(self):
-        return EagerUpgradeProvider(
-            self.tracked_names, self.preferred_pins,
-            self.requirements, self.sources, self.allow_prereleases,
-        )
diff --git a/pipenv/vendor/passa/locking.py b/pipenv/vendor/passa/locking.py
deleted file mode 100644
index e4b6ced57f..0000000000
--- a/pipenv/vendor/passa/locking.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# -*- coding=utf-8 -*-
-
-from __future__ import absolute_import, unicode_literals
-
-import itertools
-
-from plette import Lockfile
-from requirementslib import Requirement
-from resolvelib import Resolver
-
-from .caches import HashCache
-from .hashes import get_hashes
-from .metadata import set_metadata
-from .providers import RequirementsLibProvider
-from .reporters import StdOutReporter
-from .traces import trace_graph
-from .utils import identify_requirment
-
-
-def resolve_requirements(requirements, sources, pins, allow_pre):
-    """Lock specified (abstract) requirements into (concrete) candidates.
-
-    The locking procedure consists of four stages:
-
-    * Resolve versions and dependency graph (powered by ResolveLib).
-    * Walk the graph to determine "why" each candidate came to be, i.e. what
-      top-level requirements result in a given candidate.
-    * Populate hashes for resolved candidates.
-    * Populate markers based on dependency specifications of each candidate,
-      and the dependency graph.
-    """
-    provider = RequirementsLibProvider(requirements, sources, pins, allow_pre)
-    reporter = StdOutReporter(requirements)
-    resolver = Resolver(provider, reporter)
-
-    state = resolver.resolve(requirements)
-    traces = trace_graph(state.graph)
-
-    hash_cache = HashCache()
-    for r in state.mapping.values():
-        if not r.hashes:
-            r.hashes = get_hashes(hash_cache, r)
-
-    set_metadata(
-        state.mapping, traces,
-        provider.fetched_dependencies, provider.requires_pythons,
-    )
-    return state, traces
-
-
-def _get_requirements(pipfile, section_name):
-    """Produce a mapping of identifier: requirement from the section.
-    """
-    return {identify_requirment(r): r for r in (
-        Requirement.from_pipfile(name, package._data)
-        for name, package in pipfile.get(section_name, {}).items()
-    )}
-
-
-def _get_derived_entries(state, traces, names):
-    """Produce a mapping containing all candidates derived from `names`.
-
-    `names` should provide a collection of requirement identifications from
-    a section (i.e. `packages` or `dev-packages`). This function uses `traces`
-    to filter out candidates in the state that are present because of an entry
-    in that collection.
-    """
-    if not names:
-        return {}
-    return_map = {}
-    for req_name_from_state, req in state.mapping.items():
-        req_traces = [trace[1] for trace in traces[req_name_from_state] if len(trace) > 1]
-        if req_name_from_state in names or len(set(names) & set(req_traces)):
-            return_map[req.normalized_name] = next(iter(req.as_pipfile().values()))
-    return return_map
-
-
-def build_lockfile(pipfile, lockfile):
-    default_reqs = _get_requirements(pipfile, "packages")
-    develop_reqs = _get_requirements(pipfile, "dev-packages")
-
-    pins = {}
-    if lockfile:
-        pins = _get_requirements(lockfile, "develop")
-        pins.update(_get_requirements(lockfile, "default"))
-
-    # This comprehension dance ensures we merge packages from both
-    # sections, and definitions in the default section win.
-    requirements = {k: r for k, r in itertools.chain(
-        develop_reqs.items(), default_reqs.items(),
-    )}.values()
-
-    sources = [s._data.copy() for s in pipfile.sources]
-    try:
-        allow_prereleases = bool(pipfile["pipenv"]["allow_prereleases"])
-    except (KeyError, TypeError):
-        allow_prereleases = False
-    state, traces = resolve_requirements(
-        requirements, sources, pins, allow_prereleases,
-    )
-
-    new_lock = Lockfile.with_meta_from(pipfile)
-    new_lock["default"] = _get_derived_entries(state, traces, default_reqs)
-    new_lock["develop"] = _get_derived_entries(state, traces, develop_reqs)
-    return new_lock
diff --git a/pipenv/vendor/passa/markers.py b/pipenv/vendor/passa/markers.py
deleted file mode 100644
index 5f6f37d6f9..0000000000
--- a/pipenv/vendor/passa/markers.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# -*- coding=utf-8 -*-
-
-from __future__ import absolute_import, unicode_literals
-
-import itertools
-import operator
-
-import vistir
-
-from packaging.specifiers import SpecifierSet, Specifier
-from packaging.markers import Marker
-
-
-PYTHON_BOUNDARIES = {2: 7, 3: 9}
-
-
-def _strip_extra(elements):
-    """Remove the "extra == ..." operands from the list.
-
-    This is not a comprehensive implementation, but relies on an important
-    characteristic of metadata generation: The "extra == ..." operand is always
-    associated with an "and" operator. This means that we can simply remove the
-    operand and the "and" operator associated with it.
-    """
-    extra_indexes = []
-    for i, element in enumerate(elements):
-        if isinstance(element, list):
-            cancelled = _strip_extra(element)
-            if cancelled:
-                extra_indexes.append(i)
-        elif isinstance(element, tuple) and element[0].value == "extra":
-            extra_indexes.append(i)
-    for i in reversed(extra_indexes):
-        del elements[i]
-        if i > 0 and elements[i - 1] == "and":
-            # Remove the "and" before it.
-            del elements[i - 1]
-        elif elements:
-            # This shouldn't ever happen, but is included for completeness.
-            # If there is not an "and" before this element, try to remove the
-            # operator after it.
-            del elements[0]
-    return (not elements)
-
-
-def get_without_extra(marker):
-    """Build a new marker without the `extra == ...` part.
-
-    The implementation relies very deeply on packaging's internals, but I
-    don't have a better way now (except implementing the whole thing myself).
-
-    This could return `None` if the `extra == ...` part is the only one in the
-    input marker.
-    """
-    # TODO: Why is this very deep in the internals? Why is a better solution
-    # implementing it yourself when someone is already maintaining a codebase
-    # for this? It's literally a grammar implementation that is required to
-    # meet the demands of a pep... -d
-    if not marker:
-        return None
-    marker = Marker(str(marker))
-    elements = marker._markers
-    _strip_extra(elements)
-    if elements:
-        return marker
-    return None
-
-
-def _markers_collect_extras(markers, collection):
-    # Optimization: the marker element is usually appended at the end.
-    for el in reversed(markers):
-        if (isinstance(el, tuple) and
-                el[0].value == "extra" and
-                el[1].value == "=="):
-            collection.add(el[2].value)
-        elif isinstance(el, list):
-            _markers_collect_extras(el, collection)
-
-
-def get_contained_extras(marker):
-    """Collect "extra == ..." operands from a marker.
-
-    Returns a set of str. Each str is a specified extra in this marker.
- """ - if not marker: - return set() - marker = Marker(str(marker)) - extras = set() - _markers_collect_extras(marker._markers, extras) - return extras - - -def _markers_contains_extra(markers): - # Optimization: the marker element is usually appended at the end. - for element in reversed(markers): - if isinstance(element, tuple) and element[0].value == "extra": - return True - elif isinstance(element, list): - if _markers_contains_extra(element): - return True - return False - - -def contains_extra(marker): - """Check whehter a marker contains an "extra == ..." operand. - """ - if not marker: - return False - marker = Marker(str(marker)) - return _markers_contains_extra(marker._markers) - - -def format_pyspec(specifier): - if isinstance(specifier, str): - if not any(operator in specifier for operator in Specifier._operators.keys()): - new_op = "==" - new_version = specifier - return Specifier("{0}{1}".format(new_op, new_version)) - version = specifier._coerce_version(specifier.version.replace(".*", "")) - version_tuple = version._version.release - if specifier.operator in (">", "<="): - # Prefer to always pick the operator for version n+1 - if version_tuple[1] < PYTHON_BOUNDARIES.get(version_tuple[0], 0): - if specifier.operator == ">": - new_op = ">=" - else: - new_op = "<" - new_version = (version_tuple[0], version_tuple[1] + 1) - specifier = Specifier("{0}{1}".format(new_op, version_to_str(new_version))) - return specifier - - -def make_version_tuple(version): - return tuple([int(x) for x in version.split(".")]) - - -def version_to_str(version): - return ".".join([str(i) for i in version]) - - -def get_specs(specset): - if isinstance(specset, Specifier): - specset = str(specset) - if isinstance(specset, str): - specset = SpecifierSet(specset.replace(".*", "")) - - specs = getattr(specset, "_specs", None) - return [(spec._spec[0], make_version_tuple(spec._spec[1])) for spec in list(specs)] - - -def group_by_version(versions): - versions = sorted(map(lambda x: make_version_tuple(x))) - grouping = itertools.groupby(versions, key=operator.itemgetter(0)) - return grouping - - -def group_by_op(specs): - specs = [get_specs(x) for x in list(specs)] - flattened = [(op, version) for spec in specs for op, version in spec] - specs = sorted(flattened, key=operator.itemgetter(1)) - grouping = itertools.groupby(specs, key=operator.itemgetter(0)) - return grouping - - -def marker_to_spec(marker): - if marker._markers[0][0] != 'python_version': - return - operator = marker._markers[0][1].value - version = marker._markers[0][2].value - specset = set() - if operator in ("in", "not in"): - op = "==" if operator == "in" else "!=" - specset |= set([Specifier("{0}{1}".format(op, v.strip())) for v in version.split(",")]) - else: - spec = Specifier("".join([operator, version])) - specset.add(spec) - if specset: - return specset - return None - - -def cleanup_specs(specs, operator="or"): - specs = {format_pyspec(spec) for spec in specs} - # for != operator we want to group by version - # if all are consecutive, join as a list - results = set() - for op, versions in group_by_op(specs): - versions = [version[1] for version in versions] - versions = sorted(vistir.misc.dedup(versions)) - # if we are doing an or operation, we need to use the min for >= - # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6 - # if we do an AND operation we need to use MAX to be more selective - if op in (">", ">="): - if operator == "or": - results.add((op, version_to_str(min(versions)))) - else: - results.add((op, 
version_to_str(max(versions)))) - # we use inverse logic here so we will take the max value if we are using OR - # but the min value if we are using AND - elif op in ("<=", "<"): - if operator == "or": - results.add((op, version_to_str(max(versions)))) - else: - results.add((op, version_to_str(min(versions)))) - # leave these the same no matter what operator we use - elif op in ("!=", "==", "~="): - version_list = sorted(["{0}".format(version_to_str(version)) for version in versions]) - version = ", ".join(version_list) - if len(version_list) == 1: - results.add((op, version)) - else: - if op == "!=": - results.add(("not in", version)) - elif op == "==": - results.add(("in", version)) - else: - version = ", ".join(sorted(["{0}".format(op, v) for v in version_list])) - specifier = SpecifierSet(version)._specs - for s in specifier: - results &= (specifier._spec[0], specifier._spec[1]) - else: - if len(version) == 1: - results.add((op, version)) - else: - specifier = SpecifierSet("{0}".format(version))._specs - for s in specifier: - results |= (specifier._spec[0], specifier._spec[1]) - return results diff --git a/pipenv/vendor/passa/metadata.py b/pipenv/vendor/passa/metadata.py deleted file mode 100644 index 312691a19c..0000000000 --- a/pipenv/vendor/passa/metadata.py +++ /dev/null @@ -1,169 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, unicode_literals - -import copy -import itertools - -import packaging.markers -import packaging.specifiers -import vistir -import vistir.misc - -from .markers import get_without_extra, cleanup_specs, marker_to_spec - - -def dedup_markers(s): - # TODO: Implement better logic. - deduped = sorted(vistir.misc.dedup(s)) - return deduped - - -class MetaSet(object): - """Representation of a "metadata set". - - This holds multiple metadata representaions. Each metadata representation - includes a marker, and a specifier set of Python versions required. - """ - def __init__(self): - self.markerset = frozenset() - self.pyspecset = packaging.specifiers.SpecifierSet() - - def __repr__(self): - return "MetaSet(markerset={0!r}, pyspecset={1!r})".format( - ",".join(sorted(self.markerset)), str(self.pyspecset), - ) - - def __str__(self): - pyspecs = set() - markerset = set() - for m in self.markerset: - py_marker = marker_to_spec(packaging.markers.Marker(m)) - if py_marker: - pyspecs.add(py_marker) - else: - markerset.add(m) - if pyspecs: - self.pyspecset._specs &= pyspecs - self.markerset = frozenset(markerset) - return " and ".join(dedup_markers(itertools.chain( - # Make sure to always use the same quotes so we can dedup properly. - ( - "{0}".format(ms) if " or " in ms else ms - for ms in (str(m).replace('"', "'") for m in self.markerset) - ), - ( - "python_version {0[0]} '{0[1]}'".format(spec) - for spec in cleanup_specs(self.pyspecset) - ), - ))) - - def __bool__(self): - return bool(self.markerset or self.pyspecset) - - def __nonzero__(self): # Python 2. - return self.__bool__() - - def __or__(self, pair): - marker, specset = pair - markerset = set(self.markerset) - pyspec_markers = set() - if marker: - pyspec_markers = marker_to_spec(marker) - if not pyspec_markers: - markerset.add(str(marker)) - else: - specset._specs &= pyspec_markers - metaset = MetaSet() - metaset.markerset = frozenset(markerset) - # TODO: Implement some logic to clean up dups like '3.0.*' and '3.0'. 
- metaset.pyspecset &= self.pyspecset & specset - return metaset - - -def _build_metasets(dependencies, pythons, key, trace, all_metasets): - all_parent_metasets = [] - for route in trace: - parent = route[-1] - try: - parent_metasets = all_metasets[parent] - except KeyError: # Parent not calculated yet. Wait for it. - return - all_parent_metasets.append((parent, parent_metasets)) - - metaset_iters = [] - for parent, parent_metasets in all_parent_metasets: - r = dependencies[parent][key] - python = pythons[key] - metaset = ( - get_without_extra(r.markers), - packaging.specifiers.SpecifierSet(python), - ) - metaset_iters.append( - parent_metaset | metaset - for parent_metaset in parent_metasets - ) - return list(itertools.chain.from_iterable(metaset_iters)) - - -def _calculate_metasets_mapping(dependencies, pythons, traces): - all_metasets = {None: [MetaSet()]} - - del traces[None] - while traces: - new_metasets = {} - for key, trace in traces.items(): - assert key not in all_metasets, key # Sanity check for debug. - metasets = _build_metasets( - dependencies, pythons, key, trace, all_metasets, - ) - if metasets is None: - continue - new_metasets[key] = metasets - if not new_metasets: - break # No progress? Deadlocked. Give up. - all_metasets.update(new_metasets) - for key in new_metasets: - del traces[key] - - return all_metasets - - -def _format_metasets(metasets): - # If there is an unconditional route, this needs to be unconditional. - if not metasets or not all(metasets): - return None - - # This extra str(Marker()) call helps simplify the expression. - return str(packaging.markers.Marker(" or ".join( - "{0}".format(s) if " and " in s else s - for s in dedup_markers(str(metaset) for metaset in metasets - if metaset) - ))) - - -def set_metadata(candidates, traces, dependencies, pythons): - """Add "metadata" to candidates based on the dependency tree. - - Metadata for a candidate includes markers and a specifier for Python - version requirements. - - :param candidates: A key-candidate mapping. Candidates in the mapping will - have their markers set. - :param traces: A graph trace (produced by `traces.trace_graph`) providing - information about dependency relationships between candidates. - :param dependencies: A key-collection mapping containing what dependencies - each candidate in `candidates` requested. - :param pythons: A key-str mapping containing Requires-Python information - of each candidate. - - Keys in mappings and entries in the trace are identifiers of a package, as - implemented by the `identify` method of the resolver's provider. - - The candidates are modified in-place. 
- """ - metasets_mapping = _calculate_metasets_mapping( - dependencies, pythons, copy.deepcopy(traces), - ) - for key, candidate in candidates.items(): - candidate.markers = _format_metasets(metasets_mapping[key]) diff --git a/pipenv/vendor/passa/operations/_utils.py b/pipenv/vendor/passa/operations/_utils.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pipenv/vendor/passa/operations/lock.py b/pipenv/vendor/passa/operations/lock.py index a68d0b7d99..200735ac2d 100644 --- a/pipenv/vendor/passa/operations/lock.py +++ b/pipenv/vendor/passa/operations/lock.py @@ -4,7 +4,7 @@ from resolvelib import NoVersionsAvailable, ResolutionImpossible -from passa.reporters import print_requirement +from passa.internals.reporters import print_requirement def lock(locker): diff --git a/pipenv/vendor/passa/providers.py b/pipenv/vendor/passa/providers.py deleted file mode 100644 index 7dfa3306dd..0000000000 --- a/pipenv/vendor/passa/providers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, print_function, unicode_literals - -import os - -import resolvelib - -from .candidates import find_candidates -from .dependencies import get_dependencies -from .utils import filter_sources, identify_requirment - - -class BasicProvider(resolvelib.AbstractProvider): - """Provider implementation to interface with `requirementslib.Requirement`. - """ - def __init__(self, root_requirements, sources, allow_prereleases): - self.sources = sources - self.allow_prereleases = bool(allow_prereleases) - self.invalid_candidates = set() - - # Remember requirements of each pinned candidate. The resolver calls - # `get_dependencies()` only when it wants to repin, so the last time - # the dependencies we got when it is last called on a package, are - # the set used by the resolver. We use this later to trace how a given - # dependency is specified by a package. - self.fetched_dependencies = {None: { - self.identify(r): r for r in root_requirements - }} - # TODO: Find a way to resolve with multiple versions (by tricking - # runtime) Include multiple keys in pipfiles? - self.requires_pythons = {None: ""} # TODO: Don't use any value - - def identify(self, dependency): - return identify_requirment(dependency) - - def get_preference(self, resolution, candidates, information): - # TODO: Provide better sorting logic. This simply resolve the ones with - # less choices first. Not sophisticated, but sounds reasonable? - return len(candidates) - - def find_matches(self, requirement): - # TODO: Implement per-package prereleases flag. (pypa/pipenv#1696) - allow_prereleases = self.allow_prereleases - sources = filter_sources(requirement, self.sources) - candidates = find_candidates(requirement, sources, allow_prereleases) - return candidates - - def is_satisfied_by(self, requirement, candidate): - # A non-named requirement has exactly one candidate, as implemented in - # `find_matches()`. It must match. - if not requirement.is_named: - return True - - # Optimization: Everything matches if there are no specifiers. - if not requirement.specifiers: - return True - - # We can't handle old version strings before PEP 440. Drop them all. - # Practically this shouldn't be a problem if the user is specifying a - # remotely reasonable dependency not from before 2013. 
- candidate_line = candidate.as_line() - if candidate_line in self.invalid_candidates: - return False - try: - version = candidate.get_specifier().version - except ValueError: - print('ignoring invalid version {}'.format(candidate_line)) - self.invalid_candidates.add(candidate_line) - return False - - return requirement.as_ireq().specifier.contains(version) - - def get_dependencies(self, candidate): - sources = filter_sources(candidate, self.sources) - try: - dependencies, requires_python = get_dependencies( - candidate, sources=sources, - ) - except Exception as e: - if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): - raise - print("failed to get dependencies for {0!r}: {1}".format( - candidate.as_line(include_hashes=False), e, - )) - dependencies = [] - requires_python = "" - candidate_key = self.identify(candidate) - self.fetched_dependencies[candidate_key] = { - self.identify(r): r for r in dependencies - } - self.requires_pythons[candidate_key] = requires_python - return dependencies - - -class PinReuseProvider(BasicProvider): - """A provider that reuses preferred pins if possible. - - This is used to implement "add", "remove", and "only-if-needed upgrade", - where already-pinned candidates in Pipfile.lock should be preferred. - """ - def __init__(self, preferred_pins, *args, **kwargs): - super(PinReuseProvider, self).__init__(*args, **kwargs) - self.preferred_pins = preferred_pins - - def find_matches(self, requirement): - candidates = super(PinReuseProvider, self).find_matches(requirement) - try: - # Add the preferred pin. Remember the resolve prefer candidates - # at the end of the list, so the most preferred should be last. - candidates.append(self.preferred_pins[self.identify(requirement)]) - except KeyError: - pass - return candidates - - -class EagerUpgradeProvider(PinReuseProvider): - """A specialized provider to handle an "eager" upgrade strategy. - - An eager upgrade tries to upgrade not only packages specified, but also - their dependeices (recursively). This contrasts to the "only-if-needed" - default, which only promises to upgrade the specified package, and - prevents touching anything else if at all possible. - - The provider is implemented as to keep track of all dependencies of the - specified packages to upgrade, and free their pins when it has a chance. - """ - def __init__(self, tracked_names, *args, **kwargs): - super(EagerUpgradeProvider, self).__init__(*args, **kwargs) - self.tracked_names = set(tracked_names) - for name in tracked_names: - self.preferred_pins.pop(name, None) - - # HACK: Set this special flag to distinguish preferred pins from - # regular, to tell the resolver to NOT use them for tracked packages. - for pin in self.preferred_pins.values(): - pin._preferred_by_provider = True - - def is_satisfied_by(self, requirement, candidate): - # If this is a tracking package, tell the resolver out of using the - # preferred pin, and into a "normal" candidate selection process. - if (self.identify(requirement) in self.tracked_names and - getattr(candidate, "_preferred_by_provider", False)): - return False - return super(EagerUpgradeProvider, self).is_satisfied_by( - requirement, candidate, - ) - - def get_dependencies(self, candidate): - # If this package is being tracked for upgrade, remove pins of its - # dependencies, and start tracking these new packages. 
- dependencies = super(EagerUpgradeProvider, self).get_dependencies( - candidate, - ) - if self.identify(candidate) in self.tracked_names: - for dependency in dependencies: - name = self.identify(dependency) - self.tracked_names.add(name) - self.preferred_pins.pop(name, None) - return dependencies - - def get_preference(self, resolution, candidates, information): - # Resolve tracking packages so we have a chance to unpin them first. - name = self.identify(candidates[0]) - if name in self.tracked_names: - return -1 - return len(candidates) diff --git a/pipenv/vendor/passa/reporters/__init__.py b/pipenv/vendor/passa/reporters/__init__.py deleted file mode 100644 index dffe532788..0000000000 --- a/pipenv/vendor/passa/reporters/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, print_function, unicode_literals - -from .base import BaseReporter - - -_REPORTER = BaseReporter() - - -def _get_stdout_reporter(): - from .stdout import Reporter - return Reporter() - - -def configure_reporter(name): - global _REPORTER - _REPORTER = { - None: BaseReporter, - "stdout": _get_stdout_reporter, - }[name]() - - -def get_reporter(): - return _REPORTER - - -def report(event, context=None): - if context is None: - context = {} - _REPORTER.report(event, context) diff --git a/pipenv/vendor/passa/reporters/base.py b/pipenv/vendor/passa/reporters/base.py deleted file mode 100644 index 66a432cbc8..0000000000 --- a/pipenv/vendor/passa/reporters/base.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, print_function, unicode_literals - -import resolvelib - - -class ResolveLibReporter(resolvelib.BaseReporter): - """Implementation of a ResolveLib reporter that bridge messages. - """ - def __init__(self, parent): - super(ResolveLibReporter, self).__init__() - self.parent = parent - - def starting(self): - self.parent.report("resolvelib-starting", {"child": self}) - - def ending_round(self, index, state): - self.parent.report("resolvelib-ending-round", { - "child": self, "index": index, "state": state, - }) - - def ending(self, state): - self.parent.report("resolvelib-ending", { - "child": self, "state": state, - }) - - -class BaseReporter(object): - """Basic reporter that does nothing. - """ - def build_for_resolvelib(self): - """Build a reporter for ResolveLib. - """ - return ResolveLibReporter(self) - - def report(self, event, context): - """Report an event. - - The default behavior is to look for a "handle_EVENT" method on the - class to execute, or do nothing if there is no such method. - - :param event: A string to indicate the event. - :param context: A mapping containing appropriate data for the handling - function. 
- """ - handler_name = "handle_{}".format(event.replace("-", "_")) - try: - handler = getattr(self, handler_name) - except AttributeError: - return - handler(context or {}) diff --git a/pipenv/vendor/passa/reporters/stdout.py b/pipenv/vendor/passa/reporters/stdout.py deleted file mode 100644 index 2042337757..0000000000 --- a/pipenv/vendor/passa/reporters/stdout.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding=utf-8 -*- - -from __future__ import absolute_import, print_function, unicode_literals - -from resolvelib import NoVersionsAvailable, ResolutionImpossible - -from .base import BaseReporter - - -def _print_title(text): - print('\n{:=^84}\n'.format(text)) - - -def _print_requirement(r, end='\n'): - print('{:>40}'.format(r.as_line(include_hashes=False)), end=end) - - -def _print_dependency(state, key): - _print_requirement(state.mapping[key], end='') - parents = sorted( - state.graph.iter_parents(key), - key=lambda n: (-1, '') if n is None else (ord(n[0].lower()), n), - ) - for i, p in enumerate(parents): - if p is None: - line = '(user)' - else: - line = state.mapping[p].as_line(include_hashes=False) - if i == 0: - padding = ' <= ' - else: - padding = ' ' * 44 - print('{pad}{line}'.format(pad=padding, line=line)) - - -class Reporter(BaseReporter): - """A reporter implementation that prints messages to stdout. - """ - def handle_resolvelib_starting(self, context): - context["child"]._prev_mapping = None - - def handle_resolvelib_ending_round(self, context): - _print_title(' Round {} '.format(context["index"])) - mapping = context["state"].mapping - if context["child"]._prev_mapping is None: - difference = set(mapping.keys()) - changed = set() - else: - prev = context["child"]._prev_mapping - difference = set(mapping.keys()) - set(prev.keys()) - changed = set( - k for k, v in mapping.items() - if k in prev and prev[k] != v - ) - context["child"]._prev_mapping = mapping - - if difference: - print('New pins: ') - for k in difference: - _print_dependency(context["state"], k) - print() - - if changed: - print('Changed pins:') - for k in changed: - _print_dependency(context["state"], k) - print() - - def handle_lock_starting(self, context): - _print_title(' User requirements ') - for r in context["requirements"]: - _print_requirement(r) - - def handle_lock_trace_ended(self, context): - _print_title(" STABLE PINS ") - mapping = context["state"].mapping - for k in sorted(mapping): - print(mapping[k].as_line(include_hashes=False)) - paths = context["traces"][k] - for path in paths: - if path == [None]: - print(' User requirement') - continue - print(' ', end='') - for v in reversed(path[1:]): - line = mapping[v].as_line(include_hashes=False) - print(' <=', line, end='') - print() - print() - - def handle_lock_failed(self, context): - e = context["exception"] - if isinstance(e, ResolutionImpossible): - print("\nCANNOT RESOLVE.\nOFFENDING REQUIREMENTS:") - for r in e.requirements: - _print_requirement(r) - elif isinstance(e, NoVersionsAvailable): - print("\nCANNOT RESOLVE. 
NO CANDIDATES FOUND FOR:")
-            print("{:>40}".format(e.requirement.as_line(include_hashes=False)))
-            if e.parent:
-                line = e.parent.as_line(include_hashes=False)
-                print("{:>41}".format("(from {})".format(line)))
-            else:
-                print("{:>41}".format("(user)"))
-        else:
-            raise
diff --git a/pipenv/vendor/passa/synchronizers.py b/pipenv/vendor/passa/synchronizers.py
deleted file mode 100644
index 30fc4492aa..0000000000
--- a/pipenv/vendor/passa/synchronizers.py
+++ /dev/null
@@ -1,211 +0,0 @@
-# -*- coding=utf-8 -*-
-
-from __future__ import absolute_import, unicode_literals
-
-import collections
-import contextlib
-import os
-import sys
-import sysconfig
-
-import pkg_resources
-
-import packaging.markers
-import packaging.version
-import requirementslib
-
-from ._pip import uninstall_requirement, EditableInstaller, WheelInstaller
-
-
-def _is_installation_local(name):
-    """Check whether the distribution is in the current Python installation.
-
-    This is used to distinguish packages seen by a virtual environment. A venv
-    may be able to see global packages, but we don't want to mess with them.
-    """
-    location = pkg_resources.working_set.by_key[name].location
-    return os.path.commonprefix([location, sys.prefix]) == sys.prefix
-
-
-def _is_up_to_date(distro, version):
-    # This is done in strings to avoid type mismatches caused by vendoring.
-    return str(version) == str(packaging.version.parse(distro.version))
-
-
-GroupCollection = collections.namedtuple("GroupCollection", [
-    "uptodate", "outdated", "noremove", "unneeded",
-])
-
-
-def _group_installed_names(packages):
-    """Group locally installed packages based on given specifications.
-
-    `packages` is a name-package mapping that is used as the baseline to
-    determine how the installed packages should be grouped.
-
-    Returns a `GroupCollection` of disjoint sets, all containing names of
-    installed packages:
-
-    * `uptodate`: These match the specifications.
-    * `outdated`: These installations are specified, but don't match the
-      specifications in `packages`.
-    * `unneeded`: These are installed, but not specified in `packages`.
-    """
-    groupcoll = GroupCollection(set(), set(), set(), set())
-
-    for distro in pkg_resources.working_set:
-        name = distro.key
-        try:
-            package = packages[name]
-        except KeyError:
-            groupcoll.unneeded.add(name)
-            continue
-
-        r = requirementslib.Requirement.from_pipfile(name, package)
-        if not r.is_named:
-            # Always mark non-named. I think pip does something similar?
-            groupcoll.outdated.add(name)
-        elif not _is_up_to_date(distro, r.get_version()):
-            groupcoll.outdated.add(name)
-        else:
-            groupcoll.uptodate.add(name)
-
-    return groupcoll
-
-
-@contextlib.contextmanager
-def _remove_package(name):
-    if name is None or not _is_installation_local(name):
-        yield
-        return
-    r = requirementslib.Requirement.from_line(name)
-    with uninstall_requirement(r.as_ireq(), auto_confirm=True, verbose=False):
-        yield
-
-
-def _get_packages(lockfile, default, develop):
-    # Don't need to worry about duplicates because only extras can differ.
-    # Extras don't matter because they only affect dependencies, and we
-    # don't install dependencies anyway!
-    packages = {}
-    if default:
-        packages.update(lockfile.default._data)
-    if develop:
-        packages.update(lockfile.develop._data)
-    return packages
-
-
-def _build_paths():
-    """Prepare paths for distlib.wheel.Wheel to install into.
- """ - paths = sysconfig.get_paths() - return { - "prefix": sys.prefix, - "data": paths["data"], - "scripts": paths["scripts"], - "headers": paths["include"], - "purelib": paths["purelib"], - "platlib": paths["platlib"], - } - - -PROTECTED_FROM_CLEAN = {"setuptools", "pip"} - - -def _clean(names): - for name in names: - if name in PROTECTED_FROM_CLEAN: - continue - with _remove_package(name): - pass - - -class Synchronizer(object): - """Helper class to install packages from a project's lock file. - """ - def __init__(self, project, default, develop, clean_unneeded): - self._root = project.root # Only for repr. - self.packages = _get_packages(project.lockfile, default, develop) - self.sources = project.lockfile.meta.sources._data - self.paths = _build_paths() - self.clean_unneeded = clean_unneeded - - def __repr__(self): - return "<{0} @ {1!r}>".format(type(self).__name__, self._root) - - def sync(self): - groupcoll = _group_installed_names(self.packages) - - installed = set() - updated = set() - cleaned = set() - - # TODO: Show a prompt to confirm cleaning. We will need to implement a - # reporter pattern for this as well. - if self.clean_unneeded: - cleaned.update(groupcoll.unneeded) - _clean(cleaned) - - # TODO: Specify installation order? (pypa/pipenv#2274) - installers = [] - for name, package in self.packages.items(): - r = requirementslib.Requirement.from_pipfile(name, package) - name = r.normalized_name - if name in groupcoll.uptodate: - continue - markers = r.markers - if markers and not packaging.markers.Marker(markers).evaluate(): - continue - r.markers = None - if r.editable: - installer = EditableInstaller(r) - else: - installer = WheelInstaller(r, self.sources, self.paths) - try: - installer.prepare() - except Exception as e: - if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): - raise - print("failed to prepare {0!r}: {1}".format( - r.as_line(include_hashes=False), e, - )) - else: - installers.append((name, installer)) - - for name, installer in installers: - if name in groupcoll.outdated: - name_to_remove = name - else: - name_to_remove = None - try: - with _remove_package(name_to_remove): - installer.install() - except Exception as e: - if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"): - raise - print("failed to install {0!r}: {1}".format( - r.as_line(include_hashes=False), e, - )) - continue - if name in groupcoll.outdated or name in groupcoll.noremove: - updated.add(name) - else: - installed.add(name) - - return installed, updated, cleaned - - -class Cleaner(object): - """Helper class to clean packages not in a project's lock file. - """ - def __init__(self, project, default, develop): - self._root = project.root # Only for repr. 
-        self.packages = _get_packages(project.lockfile, default, develop)
-
-    def __repr__(self):
-        return "<{0} @ {1!r}>".format(type(self).__name__, self._root)
-
-    def clean(self):
-        groupcoll = _group_installed_names(self.packages)
-        _clean(groupcoll.unneeded)
-        return groupcoll.unneeded
diff --git a/pipenv/vendor/passa/traces.py b/pipenv/vendor/passa/traces.py
deleted file mode 100644
index 9715db975b..0000000000
--- a/pipenv/vendor/passa/traces.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding=utf-8 -*-
-
-from __future__ import absolute_import, unicode_literals
-
-
-def _trace_visit_vertex(graph, current, target, visited, path, paths):
-    if current == target:
-        paths.append(path)
-        return
-    for v in graph.iter_children(current):
-        if v == current or v in visited:
-            continue
-        next_path = path + [current]
-        next_visited = visited | {current}
-        _trace_visit_vertex(graph, v, target, next_visited, next_path, paths)
-
-
-def trace_graph(graph):
-    """Build a collection of "traces" for each package.
-
-    A trace is a list of names that eventually leads to the package. For
-    example, if A and B are root dependencies, A depends on C and D, B
-    depends on C, and C depends on D, the return value would be like::
-
-        {
-            None: [],
-            "A": [[None]],
-            "B": [[None]],
-            "C": [[None, "A"], [None, "B"]],
-            "D": [[None, "A"], [None, "A", "C"], [None, "B", "C"]],
-        }
-    """
-    result = {None: []}
-    for vertex in graph:
-        result[vertex] = []
-        for root in graph.iter_children(None):
-            paths = []
-            _trace_visit_vertex(graph, root, vertex, {None}, [None], paths)
-            result[vertex].extend(paths)
-    return result
diff --git a/pipenv/vendor/passa/utils.py b/pipenv/vendor/passa/utils.py
deleted file mode 100644
index 1028db10ac..0000000000
--- a/pipenv/vendor/passa/utils.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# -*- coding=utf-8 -*-
-
-from __future__ import absolute_import, unicode_literals
-
-
-def identify_requirment(r):
-    """Produce an identifier for a requirement to use in the resolver.
-
-    Note that we are treating the same package with different extras as
-    distinct. This allows semantics like "I only want this extra in
-    development, not production".
-
-    This also makes the resolver's implementation much simpler, with the minor
-    cost of possibly needing a few extra resolution steps if we happen to have
-    the same package appearing multiple times.
-    """
-    return "{0}{1}".format(r.normalized_name, r.extras_as_pip)
-
-
-def get_pinned_version(ireq):
-    """Get the pinned version of an InstallRequirement.
-
-    An InstallRequirement is considered pinned if:
-
-    - Is not editable
-    - It has exactly one specifier
-    - That specifier is "=="
-    - The version does not contain a wildcard
-
-    Examples:
-        django==1.8   # pinned
-        django>1.8    # NOT pinned
-        django~=1.8   # NOT pinned
-        django==1.*   # NOT pinned
-
-    Raises `TypeError` if the input is not a valid InstallRequirement, or
-    `ValueError` if the InstallRequirement is not pinned.
- """ - try: - specifier = ireq.specifier - except AttributeError: - raise TypeError("Expected InstallRequirement, not {}".format( - type(ireq).__name__, - )) - - if ireq.editable: - raise ValueError("InstallRequirement is editable") - if not specifier: - raise ValueError("InstallRequirement has no version specification") - if len(specifier._specs) != 1: - raise ValueError("InstallRequirement has multiple specifications") - - op, version = next(iter(specifier._specs))._spec - if op not in ('==', '===') or version.endswith('.*'): - raise ValueError("InstallRequirement not pinned (is {0!r})".format( - op + version, - )) - - return version - - -def is_pinned(ireq): - """Returns whether an InstallRequirement is a "pinned" requirement. - - An InstallRequirement is considered pinned if: - - - Is not editable - - It has exactly one specifier - - That specifier is "==" - - The version does not contain a wildcard - - Examples: - django==1.8 # pinned - django>1.8 # NOT pinned - django~=1.8 # NOT pinned - django==1.* # NOT pinned - """ - try: - get_pinned_version(ireq) - except (TypeError, ValueError): - return False - return True - - -def filter_sources(requirement, sources): - """Return a filtered list of sources for this requirement. - - This considers the index specified by the requirement, and returns only - matching source entries if there is at least one. - """ - if not sources or not requirement.index: - return sources - filtered_sources = [ - source for source in sources - if source.get("name") == requirement.index - ] - return filtered_sources or sources diff --git a/pipenv/vendor/passa/vcs.py b/pipenv/vendor/passa/vcs.py deleted file mode 100644 index 23d033d3bb..0000000000 --- a/pipenv/vendor/passa/vcs.py +++ /dev/null @@ -1,37 +0,0 @@ -import os - -from pip_shims import VcsSupport - -from .utils import cheesy_temporary_directory, mkdir_p - - -def _obtrain_ref(vcs_obj, src_dir, name, rev=None): - target_dir = os.path.join(src_dir, name) - target_rev = vcs_obj.make_rev_options(rev) - if not os.path.exists(target_dir): - vcs_obj.obtain(target_dir) - if (not vcs_obj.is_commit_id_equal(target_dir, rev) and - not vcs_obj.is_commit_id_equal(target_dir, target_rev)): - vcs_obj.update(target_dir, target_rev) - return vcs_obj.get_revision(target_dir) - - -def _get_src(): - src = os.environ.get("PIP_SRC") - if src: - return src - virtual_env = os.environ.get("VIRTUAL_ENV") - if virtual_env: - return os.path.join(virtual_env, "src") - temp_src = cheesy_temporary_directory(prefix='passa-src') - return temp_src - - -def set_ref(requirement): - backend = VcsSupport()._registry.get(requirement.vcs) - vcs = backend(url=requirement.req.vcs_uri) - src = _get_src() - mkdir_p(src, mode=0o775) - name = requirement.normalized_name - ref = _obtrain_ref(vcs, src, name, rev=requirement.req.ref) - requirement.req.ref = ref From 6c2db694ce8d48f3c731890c13cb1d75bb45a565 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:30:51 -0400 Subject: [PATCH 11/35] Update to pip 18 Signed-off-by: Dan Ryan --- pipenv/patched/notpip/LICENSE.txt | 2 +- pipenv/patched/notpip/__init__.py | 2 +- pipenv/patched/notpip/__main__.py | 2 +- pipenv/patched/notpip/_internal/__init__.py | 72 +- .../patched/notpip/_internal/basecommand.py | 151 +- pipenv/patched/notpip/_internal/build_env.py | 50 +- pipenv/patched/notpip/_internal/cmdoptions.py | 14 +- .../notpip/_internal/commands/__init__.py | 4 +- .../notpip/_internal/commands/check.py | 1 - .../notpip/_internal/commands/download.py | 5 +- 
.../notpip/_internal/commands/install.py | 22 +- .../patched/notpip/_internal/commands/list.py | 43 +- .../notpip/_internal/commands/search.py | 2 +- .../patched/notpip/_internal/commands/show.py | 6 +- .../notpip/_internal/commands/uninstall.py | 6 + .../notpip/_internal/commands/wheel.py | 6 +- pipenv/patched/notpip/_internal/compat.py | 6 +- .../patched/notpip/_internal/configuration.py | 4 +- pipenv/patched/notpip/_internal/download.py | 3 +- pipenv/patched/notpip/_internal/index.py | 73 +- pipenv/patched/notpip/_internal/locations.py | 2 +- .../notpip/_internal/models/__init__.py | 6 +- .../notpip/_internal/operations/check.py | 80 +- .../notpip/_internal/operations/freeze.py | 9 +- .../notpip/_internal/operations/prepare.py | 103 +- .../patched/notpip/_internal/req/__init__.py | 2 +- .../notpip/_internal/req/req_install.py | 473 +++--- .../patched/notpip/_internal/req/req_set.py | 3 - .../notpip/_internal/req/req_tracker.py | 76 + .../notpip/_internal/req/req_uninstall.py | 8 +- pipenv/patched/notpip/_internal/resolve.py | 3 +- .../notpip/_internal/utils/deprecation.py | 100 +- .../patched/notpip/_internal/utils/logging.py | 95 +- pipenv/patched/notpip/_internal/utils/misc.py | 66 +- .../notpip/_internal/utils/outdated.py | 56 +- .../notpip/_internal/utils/packaging.py | 9 +- .../patched/notpip/_internal/utils/typing.py | 4 +- pipenv/patched/notpip/_internal/utils/ui.py | 2 +- .../patched/notpip/_internal/vcs/__init__.py | 212 +-- pipenv/patched/notpip/_internal/vcs/bazaar.py | 29 +- pipenv/patched/notpip/_internal/vcs/git.py | 74 +- .../patched/notpip/_internal/vcs/mercurial.py | 27 +- .../notpip/_internal/vcs/subversion.py | 67 +- pipenv/patched/notpip/_internal/wheel.py | 36 +- pipenv/patched/notpip/_vendor/__init__.py | 3 +- .../notpip/_vendor/cachecontrol/__init__.py | 6 +- .../notpip/_vendor/cachecontrol/_cmd.py | 17 +- .../notpip/_vendor/cachecontrol/adapter.py | 55 +- .../notpip/_vendor/cachecontrol/cache.py | 6 +- .../_vendor/cachecontrol/caches/file_cache.py | 31 +- .../cachecontrol/caches/redis_cache.py | 12 +- .../notpip/_vendor/cachecontrol/controller.py | 206 ++- .../_vendor/cachecontrol/filewrapper.py | 6 +- .../notpip/_vendor/cachecontrol/heuristics.py | 37 +- .../notpip/_vendor/cachecontrol/serialize.py | 38 +- .../notpip/_vendor/cachecontrol/wrapper.py | 24 +- .../notpip/_vendor/certifi/__init__.py | 2 +- .../patched/notpip/_vendor/certifi/cacert.pem | 33 - pipenv/patched/notpip/_vendor/distlib/t32.exe | Bin pipenv/patched/notpip/_vendor/distlib/t64.exe | Bin pipenv/patched/notpip/_vendor/distlib/w32.exe | Bin pipenv/patched/notpip/_vendor/distlib/w64.exe | Bin pipenv/patched/notpip/_vendor/distro.py | 187 ++- .../patched/notpip/_vendor/idna/LICENSE.rst | 2 +- pipenv/patched/notpip/_vendor/idna/core.py | 40 +- .../patched/notpip/_vendor/idna/idnadata.py | 398 ++++- .../notpip/_vendor/idna/package_data.py | 2 +- .../patched/notpip/_vendor/idna/uts46data.py | 1275 ++++++++++++----- pipenv/patched/notpip/_vendor/ipaddress.py | 2 +- .../notpip/_vendor/pkg_resources/__init__.py | 29 +- .../notpip/_vendor/progress/__init__.py | 2 +- pipenv/patched/notpip/_vendor/progress/bar.py | 8 +- .../notpip/_vendor/progress/helpers.py | 14 +- .../patched/notpip/_vendor/pytoml/parser.py | 52 +- .../patched/notpip/_vendor/requests/LICENSE | 2 +- .../notpip/_vendor/requests/__init__.py | 19 +- .../notpip/_vendor/requests/__version__.py | 6 +- .../notpip/_vendor/requests/adapters.py | 17 +- pipenv/patched/notpip/_vendor/requests/api.py | 2 +- .../patched/notpip/_vendor/requests/auth.py | 
12 + .../patched/notpip/_vendor/requests/compat.py | 2 + .../notpip/_vendor/requests/cookies.py | 10 +- .../notpip/_vendor/requests/exceptions.py | 4 + .../patched/notpip/_vendor/requests/help.py | 2 +- .../patched/notpip/_vendor/requests/models.py | 20 +- .../notpip/_vendor/requests/sessions.py | 18 +- .../notpip/_vendor/requests/status_codes.py | 39 +- .../notpip/_vendor/requests/structures.py | 10 +- .../patched/notpip/_vendor/requests/utils.py | 116 +- .../notpip/_vendor/urllib3/__init__.py | 2 +- .../notpip/_vendor/urllib3/_collections.py | 23 +- .../notpip/_vendor/urllib3/connection.py | 40 +- .../notpip/_vendor/urllib3/connectionpool.py | 27 +- .../contrib/_securetransport/low_level.py | 7 +- .../_vendor/urllib3/contrib/appengine.py | 11 +- .../_vendor/urllib3/contrib/pyopenssl.py | 22 +- .../urllib3/contrib/securetransport.py | 32 +- .../notpip/_vendor/urllib3/contrib/socks.py | 4 + .../notpip/_vendor/urllib3/exceptions.py | 2 +- .../patched/notpip/_vendor/urllib3/fields.py | 2 +- .../notpip/_vendor/urllib3/filepost.py | 10 +- .../notpip/_vendor/urllib3/poolmanager.py | 13 +- .../patched/notpip/_vendor/urllib3/request.py | 8 +- .../notpip/_vendor/urllib3/response.py | 100 +- .../notpip/_vendor/urllib3/util/connection.py | 14 +- .../notpip/_vendor/urllib3/util/queue.py | 21 + .../notpip/_vendor/urllib3/util/retry.py | 12 +- .../notpip/_vendor/urllib3/util/selectors.py | 581 -------- .../notpip/_vendor/urllib3/util/ssl_.py | 83 +- .../notpip/_vendor/urllib3/util/wait.py | 191 ++- pipenv/patched/notpip/_vendor/vendor.txt | 20 +- 111 files changed, 3536 insertions(+), 2471 deletions(-) create mode 100644 pipenv/patched/notpip/_internal/req/req_tracker.py mode change 100755 => 100644 pipenv/patched/notpip/_vendor/distlib/t32.exe mode change 100755 => 100644 pipenv/patched/notpip/_vendor/distlib/t64.exe mode change 100755 => 100644 pipenv/patched/notpip/_vendor/distlib/w32.exe mode change 100755 => 100644 pipenv/patched/notpip/_vendor/distlib/w64.exe create mode 100644 pipenv/patched/notpip/_vendor/urllib3/util/queue.py delete mode 100644 pipenv/patched/notpip/_vendor/urllib3/util/selectors.py diff --git a/pipenv/patched/notpip/LICENSE.txt b/pipenv/patched/notpip/LICENSE.txt index f63eac3d66..d3379faca6 100644 --- a/pipenv/patched/notpip/LICENSE.txt +++ b/pipenv/patched/notpip/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file) +Copyright (c) 2008-2018 The pip developers (see AUTHORS.txt file) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/pipenv/patched/notpip/__init__.py b/pipenv/patched/notpip/__init__.py index ab64964188..9227d0eadc 100644 --- a/pipenv/patched/notpip/__init__.py +++ b/pipenv/patched/notpip/__init__.py @@ -1 +1 @@ -__version__ = "10.0.1" +__version__ = "18.0" diff --git a/pipenv/patched/notpip/__main__.py b/pipenv/patched/notpip/__main__.py index e63d7ae94c..a4879980b4 100644 --- a/pipenv/patched/notpip/__main__.py +++ b/pipenv/patched/notpip/__main__.py @@ -13,7 +13,7 @@ path = os.path.dirname(os.path.dirname(__file__)) sys.path.insert(0, path) -from pipenv.patched.notpip._internal import main as _main # noqa +from pipenv.patched.notpip._internal import main as _main # isort:skip # noqa if __name__ == '__main__': sys.exit(_main()) diff --git a/pipenv/patched/notpip/_internal/__init__.py b/pipenv/patched/notpip/_internal/__init__.py index dadf6e118c..dcd0937e25 100644 --- a/pipenv/patched/notpip/_internal/__init__.py +++ 
b/pipenv/patched/notpip/_internal/__init__.py @@ -116,6 +116,15 @@ def autocomplete(): options = [(x, v) for (x, v) in options if x not in prev_opts] # filter options by current input options = [(k, v) for k, v in options if k.startswith(current)] + # get completion type given cwords and available subcommand options + completion_type = get_path_completion_type( + cwords, cword, subcommand.parser.option_list_all, + ) + # get completion files and directories if ``completion_type`` is + # ``<file>``, ``<dir>`` or ``<path>`` + if completion_type: + options = auto_complete_paths(current, completion_type) + options = ((opt, 0) for opt in options) for option in options: opt_label = option[0] # append '=' to options which require args @@ -124,19 +133,74 @@ def autocomplete(): print(opt_label) else: # show main parser options only when necessary - if current.startswith('-') or current.startswith('--'): - opts = [i.option_list for i in parser.option_groups] - opts.append(parser.option_list) - opts = (o for it in opts for o in it) + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + opts = (o for it in opts for o in it) + if current.startswith('-'): for opt in opts: if opt.help != optparse.SUPPRESS_HELP: subcommands += opt._long_opts + opt._short_opts + else: + # get completion type given cwords and all available options + completion_type = get_path_completion_type(cwords, cword, opts) + if completion_type: + subcommands = auto_complete_paths(current, completion_type) print(' '.join([x for x in subcommands if x.startswith(current)])) sys.exit(1) + +def get_path_completion_type(cwords, cword, opts): + """Get the type of path completion (``file``, ``dir``, ``path`` or None) + + :param cwords: same as the environmental variable ``COMP_WORDS`` + :param cword: same as the environmental variable ``COMP_CWORD`` + :param opts: The available options to check + :return: path completion type (``file``, ``dir``, ``path`` or None) + """ + if cword < 2 or not cwords[cword - 2].startswith('-'): + return + for opt in opts: + if opt.help == optparse.SUPPRESS_HELP: + continue + for o in str(opt).split('/'): + if cwords[cword - 2].split('=')[0] == o: + if any(x in ('path', 'file', 'dir') + for x in opt.metavar.split('/')): + return opt.metavar + + +def auto_complete_paths(current, completion_type): + """If ``completion_type`` is ``file`` or ``path``, list all regular files + and directories starting with ``current``; otherwise only list directories + starting with ``current``.
+ + :param current: The word to be completed + :param completion_type: path completion type (``file``, ``path`` or ``dir``) + :return: A generator of regular files and/or directories + """ + directory, filename = os.path.split(current) + current_path = os.path.abspath(directory) + # Don't complete paths if they can't be accessed + if not os.access(current_path, os.R_OK): + return + filename = os.path.normcase(filename) + # list all files that start with ``filename`` + file_list = (x for x in os.listdir(current_path) + if os.path.normcase(x).startswith(filename)) + for f in file_list: + opt = os.path.join(current_path, f) + comp_file = os.path.normcase(os.path.join(directory, f)) + # complete regular files when there is not ``<dir>`` after option + # complete directories when there is ``<file>``, ``<path>`` or + # ``<dir>`` after option + if completion_type != 'dir' and os.path.isfile(opt): + yield comp_file + elif os.path.isdir(opt): + yield os.path.join(comp_file, '') + + def create_main_parser(): parser_kw = { 'usage': '\n%prog <command> [options]', diff --git a/pipenv/patched/notpip/_internal/basecommand.py b/pipenv/patched/notpip/_internal/basecommand.py index e35bf3d12a..60199d5532 100644 --- a/pipenv/patched/notpip/_internal/basecommand.py +++ b/pipenv/patched/notpip/_internal/basecommand.py @@ -6,13 +6,11 @@ import optparse import os import sys -import warnings from pipenv.patched.notpip._internal import cmdoptions from pipenv.patched.notpip._internal.baseparser import ( ConfigOptionParser, UpdatingDefaultsHelpFormatter, ) -from pipenv.patched.notpip._internal.compat import WINDOWS from pipenv.patched.notpip._internal.download import PipSession from pipenv.patched.notpip._internal.exceptions import ( BadCommand, CommandError, InstallationError, PreviousBuildDirError, @@ -26,14 +24,13 @@ ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, ) -from pipenv.patched.notpip._internal.utils import deprecation -from pipenv.patched.notpip._internal.utils.logging import IndentingFormatter +from pipenv.patched.notpip._internal.utils.logging import setup_logging from pipenv.patched.notpip._internal.utils.misc import get_prog, normalize_path from pipenv.patched.notpip._internal.utils.outdated import pip_version_check from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Optional + from typing import Optional # noqa: F401 __all__ = ['Command'] @@ -45,7 +42,6 @@ class Command(object): usage = None # type: Optional[str] hidden = False # type: bool ignore_require_venv = False # type: bool - log_streams = ("ext://sys.stdout", "ext://sys.stderr") def __init__(self, isolated=False): parser_kw = { @@ -117,96 +113,15 @@ def main(self, args): # Set verbosity so that it can be used elsewhere. self.verbosity = options.verbose - options.quiet - if self.verbosity >= 1: - level = "DEBUG" - elif self.verbosity == -1: - level = "WARNING" - elif self.verbosity == -2: - level = "ERROR" - elif self.verbosity <= -3: - level = "CRITICAL" - else: - level = "INFO" - - # The root logger should match the "console" level *unless* we - # specified "--log" to send debug logs to a file.
- root_level = level - if options.log: - root_level = "DEBUG" - - logger_class = "pip._internal.utils.logging.ColorizedStreamHandler" - handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler" - - logging.config.dictConfig({ - "version": 1, - "disable_existing_loggers": False, - "filters": { - "exclude_warnings": { - "()": "pip._internal.utils.logging.MaxLevelFilter", - "level": logging.WARNING, - }, - }, - "formatters": { - "indent": { - "()": IndentingFormatter, - "format": "%(message)s", - }, - }, - "handlers": { - "console": { - "level": level, - "class": logger_class, - "no_color": options.no_color, - "stream": self.log_streams[0], - "filters": ["exclude_warnings"], - "formatter": "indent", - }, - "console_errors": { - "level": "WARNING", - "class": logger_class, - "no_color": options.no_color, - "stream": self.log_streams[1], - "formatter": "indent", - }, - "user_log": { - "level": "DEBUG", - "class": handler_class, - "filename": options.log or "/dev/null", - "delay": True, - "formatter": "indent", - }, - }, - "root": { - "level": root_level, - "handlers": list(filter(None, [ - "console", - "console_errors", - "user_log" if options.log else None, - ])), - }, - # Disable any logging besides WARNING unless we have DEBUG level - # logging enabled. These use both pip._vendor and the bare names - # for the case where someone unbundles our libraries. - "loggers": { - name: { - "level": ( - "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" - ) - } for name in [ - "pip._vendor", "distlib", "requests", "urllib3" - ] - }, - }) - - if sys.version_info[:2] == (3, 3): - warnings.warn( - "Python 3.3 supported has been deprecated and support for it " - "will be dropped in the future. Please upgrade your Python.", - deprecation.RemovedInPip11Warning, - ) + setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) - # TODO: try to get these passing down from the command? - # without resorting to os.environ to hold these. + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. 
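(Editorial sketch, not part of the patch: the verbosity branches deleted above are assumed to be what ``setup_logging`` now encapsulates; its body is outside this hunk. ``_verbosity_to_level`` below is an illustrative name, not pip's API; it simply restates the removed mapping from stacked ``-v``/``-q`` flags to a log level.)

    import logging

    def _verbosity_to_level(verbosity):
        # verbosity = count of -v flags minus count of -q flags;
        # -v gives DEBUG, each extra -q steps down through WARNING,
        # ERROR and CRITICAL; the default is INFO.
        if verbosity >= 1:
            return "DEBUG"
        elif verbosity == -1:
            return "WARNING"
        elif verbosity == -2:
            return "ERROR"
        elif verbosity <= -3:
            return "CRITICAL"
        return "INFO"

    # The stdlib accepts level names directly:
    logging.getLogger("pip").setLevel(_verbosity_to_level(0))  # INFO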
if options.no_input: os.environ['PIP_NO_INPUT'] = '1' @@ -222,8 +137,6 @@ def main(self, args): ) sys.exit(VIRTUALENV_NOT_FOUND) - original_root_handlers = set(logging.root.handlers) - try: status = self.run(options, args) # FIXME: all commands should return an exit status @@ -250,23 +163,27 @@ def main(self, args): logger.debug('Exception information:', exc_info=True) return ERROR - except: + except BaseException: logger.critical('Exception:', exc_info=True) return UNKNOWN_ERROR finally: # Check if we're using the latest version of pip available - if (not options.disable_pip_version_check and not - getattr(options, "no_index", False)): - with self._build_session( - options, - retries=0, - timeout=min(5, options.timeout)) as session: + skip_version_check = ( + options.disable_pip_version_check or + getattr(options, "no_index", False) + ) + if not skip_version_check: + session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout) + ) + with session: pip_version_check(session, options) - # Avoid leaking loggers - for handler in set(logging.root.handlers) - original_root_handlers: - # this method benefit from the Logger class internal lock - logging.root.removeHandler(handler) + + # Shutdown the logging module + logging.shutdown() return SUCCESS @@ -330,23 +247,6 @@ def populate_requirement_set(requirement_set, args, options, finder, 'You must give at least one requirement to %(name)s ' '(see "pip help %(name)s")' % opts) - # On Windows, any operation modifying pip should be run as: - # python -m pip ... - # See https://github.com/pypa/pip/issues/1299 for more discussion - should_show_use_python_msg = ( - WINDOWS and - requirement_set.has_requirement("pip") and - os.path.basename(sys.argv[0]).startswith("pip") - ) - if should_show_use_python_msg: - new_command = [ - sys.executable, "-m", "pip" - ] + sys.argv[1:] - raise CommandError( - 'To modify pip, please run the following command:\n{}' - .format(" ".join(new_command)) - ) - def _build_package_finder(self, options, session, platform=None, python_versions=None, abi=None, implementation=None): @@ -370,4 +270,5 @@ def _build_package_finder(self, options, session, versions=python_versions, abi=abi, implementation=implementation, + prefer_binary=options.prefer_binary, ) diff --git a/pipenv/patched/notpip/_internal/build_env.py b/pipenv/patched/notpip/_internal/build_env.py index c41696ef19..1d351b5c42 100644 --- a/pipenv/patched/notpip/_internal/build_env.py +++ b/pipenv/patched/notpip/_internal/build_env.py @@ -1,28 +1,32 @@ """Build Environment used for isolation during sdist building """ +import logging import os +import sys from distutils.sysconfig import get_python_lib from sysconfig import get_paths +from pipenv.patched.notpip._internal.utils.misc import call_subprocess from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory +from pipenv.patched.notpip._internal.utils.ui import open_spinner + +logger = logging.getLogger(__name__) class BuildEnvironment(object): """Creates and manages an isolated environment to install build deps """ - def __init__(self, no_clean): + def __init__(self): self._temp_dir = TempDirectory(kind="build-env") - self._no_clean = no_clean + self._temp_dir.create() @property def path(self): return self._temp_dir.path def __enter__(self): - self._temp_dir.create() - self.save_path = os.environ.get('PATH', None) self.save_pythonpath = os.environ.get('PYTHONPATH', None) self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None) @@ -58,9 +62,6 @@ def __enter__(self): 
return self.path def __exit__(self, exc_type, exc_val, exc_tb): - if not self._no_clean: - self._temp_dir.cleanup() - def restore_var(varname, old_value): if old_value is None: os.environ.pop(varname, None) @@ -74,12 +75,42 @@ def restore_var(varname, old_value): def cleanup(self): self._temp_dir.cleanup() + def install_requirements(self, finder, requirements, message): + args = [ + sys.executable, '-m', 'pip', 'install', '--ignore-installed', + '--no-user', '--prefix', self.path, '--no-warn-script-location', + ] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append('-v') + for format_control in ('no_binary', 'only_binary'): + formats = getattr(finder.format_control, format_control) + args.extend(('--' + format_control.replace('_', '-'), + ','.join(sorted(formats or {':none:'})))) + if finder.index_urls: + args.extend(['-i', finder.index_urls[0]]) + for extra_index in finder.index_urls[1:]: + args.extend(['--extra-index-url', extra_index]) + else: + args.append('--no-index') + for link in finder.find_links: + args.extend(['--find-links', link]) + for _, host, _ in finder.secure_origins: + args.extend(['--trusted-host', host]) + if finder.allow_all_prereleases: + args.append('--pre') + if finder.process_dependency_links: + args.append('--process-dependency-links') + args.append('--') + args.extend(requirements) + with open_spinner(message) as spinner: + call_subprocess(args, show_stdout=False, spinner=spinner) + class NoOpBuildEnvironment(BuildEnvironment): """A no-op drop-in replacement for BuildEnvironment """ - def __init__(self, no_clean): + def __init__(self): pass def __enter__(self): @@ -90,3 +121,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): def cleanup(self): pass + + def install_requirements(self, finder, requirements, message): + raise NotImplementedError() diff --git a/pipenv/patched/notpip/_internal/cmdoptions.py b/pipenv/patched/notpip/_internal/cmdoptions.py index 0b1c031a7d..c25e769fb3 100644 --- a/pipenv/patched/notpip/_internal/cmdoptions.py +++ b/pipenv/patched/notpip/_internal/cmdoptions.py @@ -17,13 +17,13 @@ FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, ) from pipenv.patched.notpip._internal.locations import USER_CACHE_DIR, src_prefix -from pipenv.patched.notpip._internal.models import PyPI +from pipenv.patched.notpip._internal.models.index import PyPI from pipenv.patched.notpip._internal.utils.hashes import STRONG_HASHES from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING from pipenv.patched.notpip._internal.utils.ui import BAR_TYPES if MYPY_CHECK_RUNNING: - from typing import Any + from typing import Any # noqa: F401 def make_option_group(group, parser): @@ -406,6 +406,16 @@ def only_binary(): ) +def prefer_binary(): + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help="Prefer older binary packages over newer source packages." 
+ ) + + cache_dir = partial( Option, "--cache-dir", diff --git a/pipenv/patched/notpip/_internal/commands/__init__.py b/pipenv/patched/notpip/_internal/commands/__init__.py index 101fa37360..140c460936 100644 --- a/pipenv/patched/notpip/_internal/commands/__init__.py +++ b/pipenv/patched/notpip/_internal/commands/__init__.py @@ -20,8 +20,8 @@ from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import List, Type - from pipenv.patched.notpip._internal.basecommand import Command + from typing import List, Type # noqa: F401 + from pipenv.patched.notpip._internal.basecommand import Command # noqa: F401 commands_order = [ InstallCommand, diff --git a/pipenv/patched/notpip/_internal/commands/check.py b/pipenv/patched/notpip/_internal/commands/check.py index c9acaff5de..cd5ffb5f9f 100644 --- a/pipenv/patched/notpip/_internal/commands/check.py +++ b/pipenv/patched/notpip/_internal/commands/check.py @@ -4,7 +4,6 @@ from pipenv.patched.notpip._internal.operations.check import ( check_package_set, create_package_set_from_installed, ) -from pipenv.patched.notpip._internal.utils.misc import get_installed_distributions logger = logging.getLogger(__name__) diff --git a/pipenv/patched/notpip/_internal/commands/download.py b/pipenv/patched/notpip/_internal/commands/download.py index e2cdae5efe..63d91b0407 100644 --- a/pipenv/patched/notpip/_internal/commands/download.py +++ b/pipenv/patched/notpip/_internal/commands/download.py @@ -9,6 +9,7 @@ from pipenv.patched.notpip._internal.index import FormatControl from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer from pipenv.patched.notpip._internal.req import RequirementSet +from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker from pipenv.patched.notpip._internal.resolve import Resolver from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner from pipenv.patched.notpip._internal.utils.misc import ensure_dir, normalize_path @@ -52,6 +53,7 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.global_options()) cmd_opts.add_option(cmdoptions.no_binary()) cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) cmd_opts.add_option(cmdoptions.src()) cmd_opts.add_option(cmdoptions.pre()) cmd_opts.add_option(cmdoptions.no_clean()) @@ -179,7 +181,7 @@ def run(self, options, args): ) options.cache_dir = None - with TempDirectory( + with RequirementTracker() as req_tracker, TempDirectory( options.build_dir, delete=build_delete, kind="download" ) as directory: @@ -203,6 +205,7 @@ def run(self, options, args): wheel_download_dir=None, progress_bar=options.progress_bar, build_isolation=options.build_isolation, + req_tracker=req_tracker, ) resolver = Resolver( diff --git a/pipenv/patched/notpip/_internal/commands/install.py b/pipenv/patched/notpip/_internal/commands/install.py index 136310de43..ebdf07d72b 100644 --- a/pipenv/patched/notpip/_internal/commands/install.py +++ b/pipenv/patched/notpip/_internal/commands/install.py @@ -7,6 +7,8 @@ import shutil from optparse import SUPPRESS_HELP +from pipenv.patched.notpip._vendor import pkg_resources + from pipenv.patched.notpip._internal import cmdoptions from pipenv.patched.notpip._internal.basecommand import RequirementCommand from pipenv.patched.notpip._internal.cache import WheelCache @@ -17,10 +19,14 @@ from pipenv.patched.notpip._internal.operations.check import check_install_conflicts from 
pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer from pipenv.patched.notpip._internal.req import RequirementSet, install_given_reqs +from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker from pipenv.patched.notpip._internal.resolve import Resolver from pipenv.patched.notpip._internal.status_codes import ERROR from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner -from pipenv.patched.notpip._internal.utils.misc import ensure_dir, get_installed_version +from pipenv.patched.notpip._internal.utils.misc import ( + ensure_dir, get_installed_version, + protect_pip_from_modification_on_windows, +) from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.wheel import WheelBuilder @@ -183,6 +189,7 @@ def __init__(self, *args, **kw): cmd_opts.add_option(cmdoptions.no_binary()) cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) cmd_opts.add_option(cmdoptions.no_clean()) cmd_opts.add_option(cmdoptions.require_hashes()) cmd_opts.add_option(cmdoptions.progress_bar()) @@ -254,7 +261,7 @@ def run(self, options, args): ) options.cache_dir = None - with TempDirectory( + with RequirementTracker() as req_tracker, TempDirectory( options.build_dir, delete=build_delete, kind="install" ) as directory: requirement_set = RequirementSet( @@ -273,6 +280,7 @@ def run(self, options, args): wheel_download_dir=None, progress_bar=options.progress_bar, build_isolation=options.build_isolation, + req_tracker=req_tracker, ) resolver = Resolver( @@ -290,6 +298,10 @@ def run(self, options, args): ) resolver.resolve(requirement_set) + protect_pip_from_modification_on_windows( + modifying_pip=requirement_set.has_requirement("pip") + ) + # If caching is disabled or wheel is not installed don't # try to build wheels. 
if wheel and options.cache_dir: @@ -335,20 +347,22 @@ def run(self, options, args): use_user_site=options.use_user_site, ) - possible_lib_locations = get_lib_location_guesses( + lib_locations = get_lib_location_guesses( user=options.use_user_site, home=target_temp_dir.path, root=options.root_path, prefix=options.prefix_path, isolated=options.isolated_mode, ) + working_set = pkg_resources.WorkingSet(lib_locations) + reqs = sorted(installed, key=operator.attrgetter('name')) items = [] for req in reqs: item = req.name try: installed_version = get_installed_version( - req.name, possible_lib_locations + req.name, working_set=working_set ) if installed_version: item += '-' + installed_version diff --git a/pipenv/patched/notpip/_internal/commands/list.py b/pipenv/patched/notpip/_internal/commands/list.py index dc3c0f2c8d..99aee99fed 100644 --- a/pipenv/patched/notpip/_internal/commands/list.py +++ b/pipenv/patched/notpip/_internal/commands/list.py @@ -2,7 +2,6 @@ import json import logging -import warnings from pipenv.patched.notpip._vendor import six from pipenv.patched.notpip._vendor.six.moves import zip_longest @@ -11,7 +10,6 @@ from pipenv.patched.notpip._internal.cmdoptions import index_group, make_option_group from pipenv.patched.notpip._internal.exceptions import CommandError from pipenv.patched.notpip._internal.index import PackageFinder -from pipenv.patched.notpip._internal.utils.deprecation import RemovedInPip11Warning from pipenv.patched.notpip._internal.utils.misc import ( dist_is_editable, get_installed_distributions, ) @@ -78,9 +76,9 @@ def __init__(self, *args, **kw): action='store', dest='list_format', default="columns", - choices=('legacy', 'columns', 'freeze', 'json'), + choices=('columns', 'freeze', 'json'), help="Select the output format among: columns (default), freeze, " - "json, or legacy.", + "or json", ) cmd_opts.add_option( @@ -123,13 +121,6 @@ def _build_package_finder(self, options, index_urls, session): ) def run(self, options, args): - if options.list_format == "legacy": - warnings.warn( - "The legacy format has been deprecated and will be removed " - "in the future.", - RemovedInPip11Warning, - ) - if options.outdated and options.uptodate: raise CommandError( "Options --outdated and --uptodate cannot be combined.") @@ -208,30 +199,6 @@ def iter_packages_latest_infos(self, packages, options): dist.latest_filetype = typ yield dist - def output_legacy(self, dist, options): - if options.verbose >= 1: - return '%s (%s, %s, %s)' % ( - dist.project_name, - dist.version, - dist.location, - get_installer(dist), - ) - elif dist_is_editable(dist): - return '%s (%s, %s)' % ( - dist.project_name, - dist.version, - dist.location, - ) - else: - return '%s (%s)' % (dist.project_name, dist.version) - - def output_legacy_latest(self, dist, options): - return '%s - Latest: %s [%s]' % ( - self.output_legacy(dist, options), - dist.latest_version, - dist.latest_filetype, - ) - def output_package_listing(self, packages, options): packages = sorted( packages, @@ -249,12 +216,6 @@ def output_package_listing(self, packages, options): logger.info("%s==%s", dist.project_name, dist.version) elif options.list_format == 'json': logger.info(format_for_json(packages, options)) - elif options.list_format == "legacy": - for dist in packages: - if options.outdated: - logger.info(self.output_legacy_latest(dist, options)) - else: - logger.info(self.output_legacy(dist, options)) def output_package_listing_columns(self, data, header): # insert the header first: we need to know the size of column names 
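(Editorial sketch, not part of the patch: ``protect_pip_from_modification_on_windows``, called by the install hunk above and the uninstall hunk below, centralizes the Windows guard deleted from ``populate_requirement_set`` earlier in this patch. The sketch is reconstructed from that removed block for illustration; the real helper lives in ``utils/misc.py``, is not shown in this diff, and raises pip's ``CommandError``.)

    import os
    import sys

    WINDOWS = (os.name == "nt")  # stand-in for pip's compat.WINDOWS

    def protect_pip_from_modification_on_windows(modifying_pip):
        # A running pip.exe cannot overwrite itself on Windows, so any
        # operation that modifies pip must be re-run as ``python -m pip``.
        # See https://github.com/pypa/pip/issues/1299 for the discussion.
        if (modifying_pip and WINDOWS and
                os.path.basename(sys.argv[0]).startswith("pip")):
            new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
            raise RuntimeError(  # CommandError in the real helper
                'To modify pip, please run the following command:\n{}'
                .format(" ".join(new_command))
            )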
diff --git a/pipenv/patched/notpip/_internal/commands/search.py b/pipenv/patched/notpip/_internal/commands/search.py index 32a111ae29..ac111c1487 100644 --- a/pipenv/patched/notpip/_internal/commands/search.py +++ b/pipenv/patched/notpip/_internal/commands/search.py @@ -15,7 +15,7 @@ from pipenv.patched.notpip._internal.compat import get_terminal_size from pipenv.patched.notpip._internal.download import PipXmlrpcTransport from pipenv.patched.notpip._internal.exceptions import CommandError -from pipenv.patched.notpip._internal.models import PyPI +from pipenv.patched.notpip._internal.models.index import PyPI from pipenv.patched.notpip._internal.status_codes import NO_MATCHES_FOUND from pipenv.patched.notpip._internal.utils.logging import indent_log diff --git a/pipenv/patched/notpip/_internal/commands/show.py b/pipenv/patched/notpip/_internal/commands/show.py index 3bac8b4fcd..8de6b6b8c5 100644 --- a/pipenv/patched/notpip/_internal/commands/show.py +++ b/pipenv/patched/notpip/_internal/commands/show.py @@ -14,7 +14,11 @@ class ShowCommand(Command): - """Show information about one or more installed packages.""" + """ + Show information about one or more installed packages. + + The output is in RFC-compliant mail header format. + """ name = 'show' usage = """ %prog [options] <package> ...""" diff --git a/pipenv/patched/notpip/_internal/commands/uninstall.py b/pipenv/patched/notpip/_internal/commands/uninstall.py index 262c2c45d3..45a0eba52f 100644 --- a/pipenv/patched/notpip/_internal/commands/uninstall.py +++ b/pipenv/patched/notpip/_internal/commands/uninstall.py @@ -5,6 +5,7 @@ from pipenv.patched.notpip._internal.basecommand import Command from pipenv.patched.notpip._internal.exceptions import InstallationError from pipenv.patched.notpip._internal.req import InstallRequirement, parse_requirements +from pipenv.patched.notpip._internal.utils.misc import protect_pip_from_modification_on_windows class UninstallCommand(Command): @@ -63,6 +64,11 @@ def run(self, options, args): 'You must give at least one requirement to %(name)s (see ' '"pip help %(name)s")' % dict(name=self.name) ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + for req in reqs_to_uninstall.values(): uninstall_pathset = req.uninstall( auto_confirm=options.yes, verbose=self.verbosity > 0, diff --git a/pipenv/patched/notpip/_internal/commands/wheel.py b/pipenv/patched/notpip/_internal/commands/wheel.py index e97d5988e2..c04d58ed21 100644 --- a/pipenv/patched/notpip/_internal/commands/wheel.py +++ b/pipenv/patched/notpip/_internal/commands/wheel.py @@ -10,6 +10,7 @@ from pipenv.patched.notpip._internal.exceptions import CommandError, PreviousBuildDirError from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer from pipenv.patched.notpip._internal.req import RequirementSet +from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker from pipenv.patched.notpip._internal.resolve import Resolver from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.wheel import WheelBuilder @@ -57,6 +58,7 @@ def __init__(self, *args, **kw): ) cmd_opts.add_option(cmdoptions.no_binary()) cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) cmd_opts.add_option( '--build-option', dest='build_options', @@ -119,9 +121,10 @@ def run(self, options, args): build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir,
options.format_control) - with TempDirectory( + with RequirementTracker() as req_tracker, TempDirectory( options.build_dir, delete=build_delete, kind="wheel" ) as directory: + requirement_set = RequirementSet( require_hashes=options.require_hashes, ) @@ -139,6 +142,7 @@ def run(self, options, args): wheel_download_dir=options.wheel_dir, progress_bar=options.progress_bar, build_isolation=options.build_isolation, + req_tracker=req_tracker, ) resolver = Resolver( diff --git a/pipenv/patched/notpip/_internal/compat.py b/pipenv/patched/notpip/_internal/compat.py index 3407880c52..6e51e32a73 100644 --- a/pipenv/patched/notpip/_internal/compat.py +++ b/pipenv/patched/notpip/_internal/compat.py @@ -164,7 +164,7 @@ def expanduser(path): """ Expand ~ and ~user constructions. - Includes a workaround for http://bugs.python.org/issue14768 + Includes a workaround for https://bugs.python.org/issue14768 """ expanded = os.path.expanduser(path) if path.startswith('~/') and expanded.startswith('//'): @@ -217,7 +217,7 @@ def ioctl_GWINSZ(fd): 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678') ) - except: + except Exception: return None if cr == (0, 0): return None @@ -228,7 +228,7 @@ def ioctl_GWINSZ(fd): fd = os.open(os.ctermid(), os.O_RDONLY) cr = ioctl_GWINSZ(fd) os.close(fd) - except: + except Exception: pass if not cr: cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) diff --git a/pipenv/patched/notpip/_internal/configuration.py b/pipenv/patched/notpip/_internal/configuration.py index 7b9b5a4548..3df185f741 100644 --- a/pipenv/patched/notpip/_internal/configuration.py +++ b/pipenv/patched/notpip/_internal/configuration.py @@ -27,7 +27,9 @@ from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple + from typing import ( # noqa: F401 + Any, Dict, Iterable, List, NewType, Optional, Tuple + ) RawConfigParser = configparser.RawConfigParser # Shorthand Kind = NewType("Kind", str) diff --git a/pipenv/patched/notpip/_internal/download.py b/pipenv/patched/notpip/_internal/download.py index f00e398489..06a456449e 100644 --- a/pipenv/patched/notpip/_internal/download.py +++ b/pipenv/patched/notpip/_internal/download.py @@ -31,10 +31,9 @@ from pipenv.patched.notpip._vendor.urllib3.util import IS_PYOPENSSL import pipenv.patched.notpip -from pipenv.patched.notpip._internal.compat import WINDOWS from pipenv.patched.notpip._internal.exceptions import HashMismatch, InstallationError from pipenv.patched.notpip._internal.locations import write_delete_marker_file -from pipenv.patched.notpip._internal.models import PyPI +from pipenv.patched.notpip._internal.models.index import PyPI from pipenv.patched.notpip._internal.utils.encoding import auto_decode from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner from pipenv.patched.notpip._internal.utils.glibc import libc_ver diff --git a/pipenv/patched/notpip/_internal/index.py b/pipenv/patched/notpip/_internal/index.py index 8fa39c4f40..98102f3f07 100644 --- a/pipenv/patched/notpip/_internal/index.py +++ b/pipenv/patched/notpip/_internal/index.py @@ -9,7 +9,6 @@ import posixpath import re import sys -import warnings from collections import namedtuple from pipenv.patched.notpip._vendor import html5lib, requests, six @@ -27,13 +26,13 @@ BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, UnsupportedWheel, ) -from pipenv.patched.notpip._internal.models import PyPI +from 
pipenv.patched.notpip._internal.models.index import PyPI from pipenv.patched.notpip._internal.pep425tags import get_supported -from pipenv.patched.notpip._internal.utils.deprecation import RemovedInPip11Warning +from pipenv.patched.notpip._internal.utils.deprecation import deprecated from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.misc import ( ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path, - splitext, + remove_auth_from_url, splitext, ) from pipenv.patched.notpip._internal.utils.packaging import check_requires_python from pipenv.patched.notpip._internal.wheel import Wheel, wheel_ext @@ -59,7 +58,7 @@ class InstallationCandidate(object): - def __init__(self, project, version, location, requires_python=''): + def __init__(self, project, version, location, requires_python=None): self.project = project self.version = parse_version(version) self.location = location @@ -109,7 +108,8 @@ class PackageFinder(object): def __init__(self, find_links, index_urls, allow_all_prereleases=False, trusted_hosts=None, process_dependency_links=False, session=None, format_control=None, platform=None, - versions=None, abi=None, implementation=None): + versions=None, abi=None, implementation=None, + prefer_binary=False): """Create a PackageFinder. :param format_control: A FormatControl object or None. Used to control @@ -169,7 +169,7 @@ def __init__(self, find_links, index_urls, allow_all_prereleases=False, # The Session we'll use to make requests self.session = session - # Kenneth's Hack. + # Kenneth's Hack self.extra = None # The valid tags to check potential found wheel candidates against @@ -180,6 +180,9 @@ def __init__(self, find_links, index_urls, allow_all_prereleases=False, impl=implementation, ) + # Do we prefer old, but valid, binary dist over new source dist + self.prefer_binary = prefer_binary + # If we don't have TLS enabled, then WARN if anyplace we're looking # relies on TLS. if not HAS_TLS: @@ -197,7 +200,8 @@ def get_formatted_locations(self): lines = [] if self.index_urls and self.index_urls != [PyPI.simple_url]: lines.append( - "Looking in indexes: {}".format(", ".join(self.index_urls)) + "Looking in indexes: {}".format(", ".join( + remove_auth_from_url(url) for url in self.index_urls)) ) if self.find_links: lines.append( @@ -211,10 +215,12 @@ def add_dependency_links(self, links): # # dependency_links value # # FIXME: also, we should track comes_from (i.e., use Link) if self.process_dependency_links: - warnings.warn( + deprecated( "Dependency Links processing has been deprecated and will be " "removed in a future release.", - RemovedInPip11Warning, + replacement=None, + gone_in="18.2", + issue=4187, ) self.dependency_links.extend(links) @@ -288,7 +294,7 @@ def sort_path(path): return files, urls - def _candidate_sort_key(self, candidate, ignore_compatibility=True): + def _candidate_sort_key(self, candidate, ignore_compatibility=False): """ Function used to generate link sort key for link tuples. The greater the return value, the more preferred it is. @@ -297,12 +303,14 @@ def _candidate_sort_key(self, candidate, ignore_compatibility=True): 1. existing installs 2. wheels ordered via Wheel.support_index_min(self.valid_tags) 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. 
Note: it was considered to embed this logic into the Link comparison operators, but then different sdist links with the same version, would have to be considered equal """ support_num = len(self.valid_tags) build_tag = tuple() + binary_preference = 0 if candidate.location.is_wheel: # can raise InvalidWheelFilename wheel = Wheel(candidate.location.filename) @@ -311,7 +319,8 @@ def _candidate_sort_key(self, candidate, ignore_compatibility=True): "%s is not a supported wheel for this platform. It " "can't be sorted." % wheel.filename ) - + if self.prefer_binary: + binary_preference = 1 tags = self.valid_tags if not ignore_compatibility else None try: pri = -(wheel.support_index_min(tags=tags)) @@ -324,7 +333,7 @@ def _candidate_sort_key(self, candidate, ignore_compatibility=True): build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) else: # sdist pri = -(support_num) - return (candidate.version, build_tag, pri) + return (binary_preference, candidate.version, build_tag, pri) def _validate_secure_origin(self, logger, location): # Determine if this url used a secure transport mechanism @@ -512,25 +521,22 @@ def find_requirement(self, req, upgrade, ignore_compatibility=False): all_candidates = self.find_all_candidates(req.name) # Filter out anything which doesn't match our specifier - if not ignore_compatibility: - compatible_versions = set( - req.specifier.filter( - # We turn the version object into a str here because otherwise - # when we're debundled but setuptools isn't, Python will see - # packaging.version.Version and - # pkg_resources._vendor.packaging.version.Version as different - # types. This way we'll use a str as a common data interchange - # format. If we stop using the pkg_resources provided specifier - # and start using our own, we can drop the cast to str(). - [str(c.version) for c in all_candidates], - prereleases=( - self.allow_all_prereleases - if self.allow_all_prereleases else None - ), - ) + compatible_versions = set( + req.specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + [str(c.version) for c in all_candidates], + prereleases=( + self.allow_all_prereleases + if self.allow_all_prereleases else None + ), ) - else: - compatible_versions = [str(c.version) for c in all_candidates] + ) applicable_candidates = [ # Again, converting to str to deal with debundling. 
c for c in all_candidates if str(c.version) in compatible_versions @@ -618,8 +624,6 @@ def _get_pages(self, locations, project_name): try: page = self._get_page(location) except requests.HTTPError as e: - page = None - if page is None: continue yield page @@ -666,7 +670,6 @@ def _link_package_versions(self, link, search, ignore_compatibility=True): if not ext: self._log_skipped_link(link, 'not a file') return - # Always ignore unsupported extensions even when we ignore compatibility if ext not in SUPPORTED_EXTENSIONS: self._log_skipped_link( link, 'unsupported archive format: %s' % ext, @@ -709,7 +712,7 @@ def _link_package_versions(self, link, search, ignore_compatibility=True): version = egg_info_matches(egg_info, search.supplied, link) if version is None: self._log_skipped_link( - link, 'wrong project name (not %s)' % search.supplied) + link, 'Missing project version for %s' % search.supplied) return match = self._py_version_re.search(version) diff --git a/pipenv/patched/notpip/_internal/locations.py b/pipenv/patched/notpip/_internal/locations.py index 43586f4b37..29c6db7962 100644 --- a/pipenv/patched/notpip/_internal/locations.py +++ b/pipenv/patched/notpip/_internal/locations.py @@ -8,7 +8,7 @@ import sys import sysconfig from distutils import sysconfig as distutils_sysconfig -from distutils.command.install import SCHEME_KEYS, install # type: ignore +from distutils.command.install import SCHEME_KEYS # type: ignore from pipenv.patched.notpip._internal.compat import WINDOWS, expanduser from pipenv.patched.notpip._internal.utils import appdirs diff --git a/pipenv/patched/notpip/_internal/models/__init__.py b/pipenv/patched/notpip/_internal/models/__init__.py index e4de2e9db4..7855226e4b 100644 --- a/pipenv/patched/notpip/_internal/models/__init__.py +++ b/pipenv/patched/notpip/_internal/models/__init__.py @@ -1,4 +1,2 @@ -from pipenv.patched.notpip._internal.models.index import Index, PyPI - - -__all__ = ["Index", "PyPI"] +"""A package that contains models that represent entities. 
+""" diff --git a/pipenv/patched/notpip/_internal/operations/check.py b/pipenv/patched/notpip/_internal/operations/check.py index bc2c2b619b..9c8ea08eb9 100644 --- a/pipenv/patched/notpip/_internal/operations/check.py +++ b/pipenv/patched/notpip/_internal/operations/check.py @@ -6,13 +6,14 @@ from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name from pipenv.patched.notpip._internal.operations.prepare import make_abstract_dist - from pipenv.patched.notpip._internal.utils.misc import get_installed_distributions from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from pipenv.patched.notpip._internal.req.req_install import InstallRequirement - from typing import Any, Dict, Iterator, Set, Tuple, List + from pipenv.patched.notpip._internal.req.req_install import InstallRequirement # noqa: F401 + from typing import ( # noqa: F401 + Any, Callable, Dict, Iterator, Optional, Set, Tuple, List + ) # Shorthands PackageSet = Dict[str, 'PackageDetails'] @@ -33,17 +34,25 @@ def create_package_set_from_installed(**kwargs): # Default to using all packages installed on the system if kwargs == {}: kwargs = {"local_only": False, "skip": ()} - retval = {} + + package_set = {} for dist in get_installed_distributions(**kwargs): name = canonicalize_name(dist.project_name) - retval[name] = PackageDetails(dist.version, dist.requires()) - return retval + package_set[name] = PackageDetails(dist.version, dist.requires()) + return package_set -def check_package_set(package_set): - # type: (PackageSet) -> CheckResult +def check_package_set(package_set, should_ignore=None): + # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. 
""" + if should_ignore is None: + def should_ignore(name): + return False + missing = dict() conflicting = dict() @@ -52,6 +61,9 @@ def check_package_set(package_set): missing_deps = set() # type: Set[Missing] conflicting_deps = set() # type: Set[Conflicting] + if should_ignore(package_name): + continue + for req in package_set[package_name].requires: name = canonicalize_name(req.project_name) # type: str @@ -69,13 +81,10 @@ def check_package_set(package_set): if not req.specifier.contains(version, prereleases=True): conflicting_deps.add((name, version, req)) - def str_key(x): - return str(x) - if missing_deps: - missing[package_name] = sorted(missing_deps, key=str_key) + missing[package_name] = sorted(missing_deps, key=str) if conflicting_deps: - conflicting[package_name] = sorted(conflicting_deps, key=str_key) + conflicting[package_name] = sorted(conflicting_deps, key=str) return missing, conflicting @@ -86,21 +95,54 @@ def check_install_conflicts(to_install): installing given requirements """ # Start from the current state - state = create_package_set_from_installed() - _simulate_installation_of(to_install, state) - return state, check_package_set(state) + package_set = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ) + ) # NOTE from @pradyunsg # This required a minor update in dependency link handling logic over at # operations.prepare.IsSDist.dist() to get it working -def _simulate_installation_of(to_install, state): - # type: (List[InstallRequirement], PackageSet) -> None +def _simulate_installation_of(to_install, package_set): + # type: (List[InstallRequirement], PackageSet) -> Set[str] """Computes the version of packages after installing to_install. 
""" + # Keep track of packages that were installed + installed = set() + # Modify it as installing requirement_set would (assuming no errors) for inst_req in to_install: dist = make_abstract_dist(inst_req).dist(finder=None) name = canonicalize_name(dist.key) - state[name] = PackageDetails(dist.version, dist.requires()) + package_set[name] = PackageDetails(dist.version, dist.requires()) + + installed.add(name) + + return installed + + +def _create_whitelist(would_be_installed, package_set): + # type: (Set[str], PackageSet) -> Set[str] + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].requires: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/pipenv/patched/notpip/_internal/operations/freeze.py b/pipenv/patched/notpip/_internal/operations/freeze.py index e81cc1cb93..532989bcbd 100644 --- a/pipenv/patched/notpip/_internal/operations/freeze.py +++ b/pipenv/patched/notpip/_internal/operations/freeze.py @@ -4,7 +4,6 @@ import logging import os import re -import warnings from pipenv.patched.notpip._vendor import pkg_resources, six from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name @@ -13,7 +12,7 @@ from pipenv.patched.notpip._internal.exceptions import InstallationError from pipenv.patched.notpip._internal.req import InstallRequirement from pipenv.patched.notpip._internal.req.req_file import COMMENT_RE -from pipenv.patched.notpip._internal.utils.deprecation import RemovedInPip11Warning +from pipenv.patched.notpip._internal.utils.deprecation import deprecated from pipenv.patched.notpip._internal.utils.misc import ( dist_is_editable, get_installed_distributions, ) @@ -216,10 +215,12 @@ def from_dist(cls, dist, dependency_links): 'for this package:' ) else: - warnings.warn( + deprecated( "SVN editable detection based on dependency links " "will be dropped in the future.", - RemovedInPip11Warning, + replacement=None, + gone_in="18.2", + issue=4187, ) comments.append( '# Installing as editable to satisfy requirement %s:' % diff --git a/pipenv/patched/notpip/_internal/operations/prepare.py b/pipenv/patched/notpip/_internal/operations/prepare.py index 46538373cf..9ebc3ebdc8 100644 --- a/pipenv/patched/notpip/_internal/operations/prepare.py +++ b/pipenv/patched/notpip/_internal/operations/prepare.py @@ -1,15 +1,12 @@ """Prepares a distribution for installation """ -import itertools import logging import os -import sys -from copy import copy from pipenv.patched.notpip._vendor import pkg_resources, requests -from pipenv.patched.notpip._internal.build_env import NoOpBuildEnvironment +from pipenv.patched.notpip._internal.build_env import BuildEnvironment from pipenv.patched.notpip._internal.compat import expanduser from pipenv.patched.notpip._internal.download import ( is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, @@ -18,14 +15,9 @@ DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, PreviousBuildDirError, VcsHashUnsupported, ) -from pipenv.patched.notpip._internal.index import FormatControl -from pipenv.patched.notpip._internal.req.req_install import InstallRequirement from pipenv.patched.notpip._internal.utils.hashes import MissingHashes from pipenv.patched.notpip._internal.utils.logging import indent_log -from pipenv.patched.notpip._internal.utils.misc import ( - call_subprocess, display_path, normalize_path, -) -from 
pipenv.patched.notpip._internal.utils.ui import open_spinner +from pipenv.patched.notpip._internal.utils.misc import display_path, normalize_path, rmtree from pipenv.patched.notpip._internal.vcs import vcs logger = logging.getLogger(__name__) @@ -47,26 +39,6 @@ def make_abstract_dist(req): return IsSDist(req) -def _install_build_reqs(finder, prefix, build_requirements): - # NOTE: What follows is not a very good thing. - # Eventually, this should move into the BuildEnvironment class and - # that should handle all the isolation and sub-process invocation. - finder = copy(finder) - finder.format_control = FormatControl(set(), set([":all:"])) - urls = [ - finder.find_requirement( - InstallRequirement.from_line(r), upgrade=False).url - for r in build_requirements - ] - args = [ - sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--no-user', '--prefix', prefix, - ] + list(urls) - - with open_spinner("Installing build dependencies") as spinner: - call_subprocess(args, show_stdout=False, spinner=spinner) - - class DistAbstraction(object): """Abstracts out the wheel vs non-wheel Resolver.resolve() logic. @@ -123,33 +95,33 @@ def dist(self, finder): def prep_for_dist(self, finder, build_isolation): # Before calling "setup.py egg_info", we need to set-up the build # environment. - build_requirements, isolate = self.req.get_pep_518_info() - should_isolate = build_isolation and isolate - - minimum_requirements = ('setuptools', 'wheel') - missing_requirements = set(minimum_requirements) - set( - pkg_resources.Requirement(r).key - for r in build_requirements - ) - if missing_requirements: - def format_reqs(rs): - return ' and '.join(map(repr, sorted(rs))) - logger.warning( - "Missing build time requirements in pyproject.toml for %s: " - "%s.", self.req, format_reqs(missing_requirements) - ) - logger.warning( - "This version of pip does not implement PEP 517 so it cannot " - "build a wheel without %s.", format_reqs(minimum_requirements) - ) + build_requirements = self.req.get_pep_518_info() + should_isolate = build_isolation and build_requirements is not None if should_isolate: - with self.req.build_env: - pass - _install_build_reqs(finder, self.req.build_env.path, - build_requirements) - else: - self.req.build_env = NoOpBuildEnvironment(no_clean=False) + # Haven't implemented PEP 517 yet, so spew a warning about it if + # build-requirements don't include setuptools and wheel. + missing_requirements = {'setuptools', 'wheel'} - { + pkg_resources.Requirement(r).key for r in build_requirements + } + if missing_requirements: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "This version of pip does not implement PEP 517 so it " + "cannot build a wheel without %s.", + " and ".join(map(repr, sorted(missing_requirements))) + ) + + # Isolate in a BuildEnvironment and install the build-time + # requirements. + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, build_requirements, + "Installing build dependencies" + ) try: self.req.run_egg_info() @@ -173,11 +145,12 @@ class RequirementPreparer(object): """ def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir, - progress_bar, build_isolation): + progress_bar, build_isolation, req_tracker): super(RequirementPreparer, self).__init__() self.src_dir = src_dir self.build_dir = build_dir + self.req_tracker = req_tracker # Where still packed archives should be written to. 
If None, they are # not saved, and are deleted immediately after unpacking. @@ -236,16 +209,8 @@ def prepare_linked_requirement(self, req, session, finder, # installation. # FIXME: this won't upgrade when there's an existing # package unpacked in `req.source_dir` - # package unpacked in `req.source_dir` - # if os.path.exists(os.path.join(req.source_dir, 'setup.py')): - # raise PreviousBuildDirError( - # "pip can't proceed with requirements '%s' due to a" - # " pre-existing build directory (%s). This is " - # "likely due to a previous installation that failed" - # ". pip is being responsible and not assuming it " - # "can delete this. Please delete it and try again." - # % (req, req.source_dir) - # ) + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + rmtree(req.source_dir) req.populate_link(finder, upgrade_allowed, require_hashes) # We can't hit this spot and have populate_link return None. @@ -325,7 +290,8 @@ def prepare_linked_requirement(self, req, session, finder, (req, exc, req.link) ) abstract_dist = make_abstract_dist(req) - abstract_dist.prep_for_dist(finder, self.build_isolation) + with self.req_tracker.track(req): + abstract_dist.prep_for_dist(finder, self.build_isolation) if self._download_should_save: # Make a .zip of the source_dir we already created. if req.link.scheme in vcs.all_schemes: @@ -351,7 +317,8 @@ def prepare_editable_requirement(self, req, require_hashes, use_user_site, req.update_editable(not self._download_should_save) abstract_dist = make_abstract_dist(req) - abstract_dist.prep_for_dist(finder, self.build_isolation) + with self.req_tracker.track(req): + abstract_dist.prep_for_dist(finder, self.build_isolation) if self._download_should_save: req.archive(self.download_dir) diff --git a/pipenv/patched/notpip/_internal/req/__init__.py b/pipenv/patched/notpip/_internal/req/__init__.py index 9eece6ecf9..72aecb7c27 100644 --- a/pipenv/patched/notpip/_internal/req/__init__.py +++ b/pipenv/patched/notpip/_internal/req/__init__.py @@ -48,7 +48,7 @@ def install_given_reqs(to_install, install_options, global_options=(), *args, **kwargs ) - except: + except Exception: should_rollback = ( requirement.conflicts_with and not requirement.install_succeeded diff --git a/pipenv/patched/notpip/_internal/req/req_install.py b/pipenv/patched/notpip/_internal/req/req_install.py index 6347200c27..a9c642c0e9 100644 --- a/pipenv/patched/notpip/_internal/req/req_install.py +++ b/pipenv/patched/notpip/_internal/req/req_install.py @@ -1,5 +1,6 @@ from __future__ import absolute_import +import io import logging import os import re @@ -7,7 +8,6 @@ import sys import sysconfig import traceback -import warnings import zipfile from distutils.util import change_root from email.parser import FeedParser # type: ignore @@ -17,22 +17,21 @@ from pipenv.patched.notpip._vendor.packaging.markers import Marker from pipenv.patched.notpip._vendor.packaging.requirements import InvalidRequirement, Requirement from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name -from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version from pipenv.patched.notpip._vendor.packaging.version import Version +from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version from pipenv.patched.notpip._vendor.pkg_resources import RequirementParseError, parse_requirements from pipenv.patched.notpip._internal import wheel -from pipenv.patched.notpip._internal.build_env import BuildEnvironment +from pipenv.patched.notpip._internal.build_env import 
NoOpBuildEnvironment from pipenv.patched.notpip._internal.compat import native_str from pipenv.patched.notpip._internal.download import ( is_archive_file, is_url, path_to_url, url_to_path, ) -from pipenv.patched.notpip._internal.exceptions import InstallationError, UninstallationError +from pipenv.patched.notpip._internal.exceptions import InstallationError from pipenv.patched.notpip._internal.locations import ( PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, ) from pipenv.patched.notpip._internal.req.req_uninstall import UninstallPathSet -from pipenv.patched.notpip._internal.utils.deprecation import RemovedInPip11Warning from pipenv.patched.notpip._internal.utils.hashes import Hashes from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.misc import ( @@ -127,8 +126,10 @@ def __init__(self, req, comes_from, source_dir=None, editable=False, self.is_direct = False self.isolated = isolated - self.build_env = BuildEnvironment(no_clean=True) + self.build_env = NoOpBuildEnvironment() + # Constructors + # TODO: Move these out of this class into custom methods. @classmethod def from_editable(cls, editable_req, comes_from=None, isolated=False, options=None, wheel_cache=None, constraint=False): @@ -311,6 +312,13 @@ def populate_link(self, finder, upgrade, require_hashes): if old_link != self.link: logger.debug('Using cached wheel link: %s', self.link) + # Things that are valid for all kinds of requirements? + @property + def name(self): + if self.req is None: + return None + return native_str(pkg_resources.safe_name(self.req.name)) + @property def specifier(self): return self.req.specifier @@ -325,7 +333,56 @@ def is_pinned(self): return (len(specifiers) == 1 and next(iter(specifiers)).operator in {'==', '==='}) + @property + def installed_version(self): + return get_installed_version(self.name) + + def match_markers(self, extras_requested=None): + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ('',) + if self.markers is not None: + return any( + self.markers.evaluate({'extra': extra}) + for extra in extras_requested) + else: + return True + + @property + def has_hash_options(self): + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.options.get('hashes', {})) + + def hashes(self, trust_internet=True): + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) 
hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.options.get('hashes', {}).copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + def from_path(self): + """Format a nice indicator to show where this "comes from" + """ if self.req is None: return None s = str(self.req) @@ -398,12 +455,78 @@ def _correct_build_location(self): self.source_dir = os.path.normpath(os.path.abspath(new_location)) self._egg_info_path = None - @property - def name(self): + def remove_temporary_source(self): + """Remove the source files from this requirement, if they are marked + for deletion""" + if self.source_dir and os.path.exists( + os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): + logger.debug('Removing source in %s', self.source_dir) + rmtree(self.source_dir) + self.source_dir = None + self._temp_build_dir.cleanup() + self.build_env.cleanup() + + def check_if_exists(self, use_user_site): + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.conflicts_with appropriately. + """ if self.req is None: - return None - return native_str(pkg_resources.safe_name(self.req.name)) + return False + try: + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. + no_marker = Requirement(str(self.req)) + no_marker.marker = None + self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) + if self.editable and self.satisfied_by: + self.conflicts_with = self.satisfied_by + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + return True + except pkg_resources.DistributionNotFound: + return False + except pkg_resources.VersionConflict: + existing_dist = pkg_resources.get_distribution( + self.req.name + ) + if use_user_site: + if dist_in_usersite(existing_dist): + self.conflicts_with = existing_dist + elif (running_under_virtualenv() and + dist_in_site_packages(existing_dist)): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to %s in %s" % + (existing_dist.project_name, existing_dist.location) + ) + else: + self.conflicts_with = existing_dist + return True + # Things valid for wheels + @property + def is_wheel(self): + return self.link and self.link.is_wheel + + def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, + warn_script_location=True, use_user_site=False, + pycompile=True): + move_wheel_files( + self.name, self.req, wheeldir, + user=use_user_site, + home=home, + root=root, + prefix=prefix, + pycompile=pycompile, + isolated=self.isolated, + warn_script_location=warn_script_location, + ) + + # Things valid for sdists @property def setup_py_dir(self): return os.path.join( @@ -435,20 +558,47 @@ def pyproject_toml(self): return pp_toml def get_pep_518_info(self): - """Get a list of the packages required to build the project, if any, - and a flag indicating whether pyproject.toml is present, indicating - that the build should be isolated. + """Get PEP 518 build-time requirements. - Build requirements can be specified in a pyproject.toml, as described - in PEP 518. If this file exists but doesn't specify build - requirements, pip will default to installing setuptools and wheel. 
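[Editor's aside, illustrative only and not part of the patch: given the rewritten get_pep_518_info() in the hunk below, a hypothetical requirement `req` would behave as follows:

    req.get_pep_518_info()  # no pyproject.toml on disk      -> None
    req.get_pep_518_info()  # file without [build-system]    -> ["setuptools", "wheel"]
    req.get_pep_518_info()  # [build-system] w/o "requires"  -> raises InstallationError
    req.get_pep_518_info()  # requires = ["setuptools", "x"] -> ["setuptools", "x"]

all semantics taken from the code below.]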
+        Returns the list of the packages required to build the project,
+        specified as per PEP 518 within the package. If `pyproject.toml` is
+        not present, returns None to signify that it is not in use.
         """
-        if os.path.isfile(self.pyproject_toml):
-            with open(self.pyproject_toml) as f:
-                pp_toml = pytoml.load(f)
-            build_sys = pp_toml.get('build-system', {})
-            return (build_sys.get('requires', ['setuptools', 'wheel']), True)
-        return (['setuptools', 'wheel'], False)
+        # If pyproject.toml does not exist, don't do anything.
+        if not os.path.isfile(self.pyproject_toml):
+            return None
+
+        error_template = (
+            "{package} has a pyproject.toml file that does not comply "
+            "with PEP 518: {reason}"
+        )
+
+        with io.open(self.pyproject_toml, encoding="utf-8") as f:
+            pp_toml = pytoml.load(f)
+
+        # If there is no build-system table, just use setuptools and wheel.
+        if "build-system" not in pp_toml:
+            return ["setuptools", "wheel"]
+
+        # Specifying the build-system table but not the requires key is invalid
+        build_system = pp_toml["build-system"]
+        if "requires" not in build_system:
+            raise InstallationError(
+                error_template.format(package=self, reason=(
+                    "it has a 'build-system' table but not "
+                    "'build-system.requires' which is mandatory in the table"
+                ))
+            )
+
+        # Error out if it's not a list of strings
+        requires = build_system["requires"]
+        if not _is_list_of_str(requires):
+            raise InstallationError(error_template.format(
+                package=self,
+                reason="'build-system.requires' is not a list of strings.",
+            ))
+
+        return requires
 
     def run_egg_info(self):
         assert self.source_dir
@@ -559,11 +709,9 @@ def egg_info_path(self, filename):
 
             if not filenames:
                 raise InstallationError(
-                    'No files/directories in %s (from %s)' % (base, filename)
+                    "Files/directories (from %s) not found in %s"
+                    % (filename, base)
                 )
-            assert filenames, \
-                "No files/directories in %s (from %s)" % (base, filename)
-
             # if we have more than one match, we pick the toplevel one.  This
             # can easily be the case if there is a dist folder which contains
             # an extracted tarball for testing purposes.
@@ -588,9 +736,17 @@ def pkg_info(self):
 
     _requirements_section_re = re.compile(r'\[(.*?)\]')
 
-    @property
-    def installed_version(self):
-        return get_installed_version(self.name)
+    def get_dist(self):
+        """Return a pkg_resources.Distribution built from self.egg_info_path"""
+        egg_info = self.egg_info_path('').rstrip(os.path.sep)
+        base_dir = os.path.dirname(egg_info)
+        metadata = pkg_resources.PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        return pkg_resources.Distribution(
+            os.path.dirname(egg_info),
+            project_name=dist_name,
+            metadata=metadata,
+        )
 
     def assert_source_matches_version(self):
         assert self.source_dir
@@ -609,6 +765,52 @@ def assert_source_matches_version(self):
                 self,
             )
 
+    # For both source distributions and editables
+    def ensure_has_source_dir(self, parent_dir):
+        """Ensure that a source_dir is set.
+
+        This will create a temporary build dir if the name of the requirement
+        isn't known yet.
+
+        :param parent_dir: The ideal pip parent_dir for the source_dir.
+            Generally src_dir for editables and build_dir for sdists.
+ :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.build_location(parent_dir) + return self.source_dir + + # For editable installations + def install_editable(self, install_options, + global_options=(), prefix=None): + logger.info('Running setup.py develop for %s', self.name) + + if self.isolated: + global_options = list(global_options) + ["--no-user-cfg"] + + if prefix: + prefix_param = ['--prefix={}'.format(prefix)] + install_options = list(install_options) + prefix_param + + with indent_log(): + # FIXME: should we do --install-headers here too? + with self.build_env: + call_subprocess( + [ + os.environ.get('PIP_PYTHON_PATH', sys.executable), + '-c', + SETUPTOOLS_SHIM % self.setup_py + ] + + list(global_options) + + ['develop', '--no-deps'] + + list(install_options), + + cwd=self.setup_py_dir, + show_stdout=False, + ) + + self.install_succeeded = True + def update_editable(self, obtain=True): if not self.link: logger.debug( @@ -638,6 +840,7 @@ def update_editable(self, obtain=True): 'Unexpected version control type (in %s): %s' % (self.link, vc_type)) + # Top-level Actions def uninstall(self, auto_confirm=False, verbose=False, use_user_site=False): """ @@ -661,6 +864,16 @@ def uninstall(self, auto_confirm=False, verbose=False, uninstalled_pathset.remove(auto_confirm, verbose) return uninstalled_pathset + def _clean_zip_name(self, name, prefix): # only used by archive. + assert name.startswith(prefix + os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + + # TODO: Investigate if this should be kept in InstallRequirement + # Seems to be used only when VCS + downloads def archive(self, build_dir): assert self.source_dir create_archive = True @@ -709,26 +922,6 @@ def archive(self, build_dir): zip.close() logger.info('Saved %s', display_path(archive_path)) - def _clean_zip_name(self, name, prefix): - assert name.startswith(prefix + os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix) - ) - name = name[len(prefix) + 1:] - name = name.replace(os.path.sep, '/') - return name - - def match_markers(self, extras_requested=None): - if not extras_requested: - # Provide an extra to safely evaluate the markers - # without matching any extra - extras_requested = ('',) - if self.markers is not None: - return any( - self.markers.evaluate({'extra': extra}) - for extra in extras_requested) - else: - return True - def install(self, install_options, global_options=None, root=None, home=None, prefix=None, warn_script_location=True, use_user_site=False, pycompile=True): @@ -820,20 +1013,6 @@ def prepend_root(path): with open(inst_files_path, 'w') as f: f.write('\n'.join(new_lines) + '\n') - def ensure_has_source_dir(self, parent_dir): - """Ensure that a source_dir is set. - - This will create a temporary build dir if the name of the requirement - isn't known yet. - - :param parent_dir: The ideal pip parent_dir for the source_dir. - Generally src_dir for editables and build_dir for sdists. 
- :return: self.source_dir - """ - if self.source_dir is None: - self.source_dir = self.build_location(parent_dir) - return self.source_dir - def get_install_args(self, global_options, record_filename, root, prefix, pycompile): install_args = [os.environ.get('PIP_PYTHON_PATH', sys.executable), "-u"] @@ -861,165 +1040,6 @@ def get_install_args(self, global_options, record_filename, root, prefix, return install_args - def remove_temporary_source(self): - """Remove the source files from this requirement, if they are marked - for deletion""" - if self.source_dir and os.path.exists( - os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): - logger.debug('Removing source in %s', self.source_dir) - rmtree(self.source_dir) - self.source_dir = None - self._temp_build_dir.cleanup() - self.build_env.cleanup() - - def install_editable(self, install_options, - global_options=(), prefix=None): - logger.info('Running setup.py develop for %s', self.name) - - if self.isolated: - global_options = list(global_options) + ["--no-user-cfg"] - - if prefix: - prefix_param = ['--prefix={}'.format(prefix)] - install_options = list(install_options) + prefix_param - - with indent_log(): - # FIXME: should we do --install-headers here too? - with self.build_env: - call_subprocess( - [ - os.environ.get('PIP_PYTHON_PATH', sys.executable), - '-c', - SETUPTOOLS_SHIM % self.setup_py - ] + - list(global_options) + - ['develop', '--no-deps'] + - list(install_options), - - cwd=self.setup_py_dir, - show_stdout=False, - ) - - self.install_succeeded = True - - def check_if_exists(self, use_user_site): - """Find an installed distribution that satisfies or conflicts - with this requirement, and set self.satisfied_by or - self.conflicts_with appropriately. - """ - if self.req is None: - return False - try: - # get_distribution() will resolve the entire list of requirements - # anyway, and we've already determined that we need the requirement - # in question, so strip the marker so that we don't try to - # evaluate it. 
- no_marker = Requirement(str(self.req)) - no_marker.marker = None - self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) - if self.editable and self.satisfied_by: - self.conflicts_with = self.satisfied_by - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - return True - except pkg_resources.DistributionNotFound: - return False - except pkg_resources.VersionConflict: - existing_dist = pkg_resources.get_distribution( - self.req.name - ) - if use_user_site: - if dist_in_usersite(existing_dist): - self.conflicts_with = existing_dist - elif (running_under_virtualenv() and - dist_in_site_packages(existing_dist)): - raise InstallationError( - "Will not install to the user site because it will " - "lack sys.path precedence to %s in %s" % - (existing_dist.project_name, existing_dist.location) - ) - else: - self.conflicts_with = existing_dist - return True - - @property - def is_wheel(self): - return self.link and self.link.is_wheel - - def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, - warn_script_location=True, use_user_site=False, - pycompile=True): - move_wheel_files( - self.name, self.req, wheeldir, - user=use_user_site, - home=home, - root=root, - prefix=prefix, - pycompile=pycompile, - isolated=self.isolated, - warn_script_location=warn_script_location, - ) - - def get_dist(self): - """Return a pkg_resources.Distribution built from self.egg_info_path""" - egg_info = self.egg_info_path('').rstrip(os.path.sep) - base_dir = os.path.dirname(egg_info) - metadata = pkg_resources.PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - return pkg_resources.Distribution( - os.path.dirname(egg_info), - project_name=dist_name, - metadata=metadata, - ) - - @property - def has_hash_options(self): - """Return whether any known-good hashes are specified as options. - - These activate --require-hashes mode; hashes specified as part of a - URL do not. - - """ - return bool(self.options.get('hashes', {})) - - def hashes(self, trust_internet=True): - """Return a hash-comparer that considers my option- and URL-based - hashes to be known-good. - - Hashes in URLs--ones embedded in the requirements file, not ones - downloaded from an index server--are almost peers with ones from - flags. They satisfy --require-hashes (whether it was implicitly or - explicitly activated) but do not activate it. md5 and sha224 are not - allowed in flags, which should nudge people toward good algos. We - always OR all hashes together, even ones from URLs. - - :param trust_internet: Whether to trust URL-based (#md5=...) hashes - downloaded from the internet, as by populate_link() - - """ - good_hashes = self.options.get('hashes', {}).copy() - link = self.link if trust_internet else self.original_link - if link and link.hash: - good_hashes.setdefault(link.hash_name, []).append(link.hash) - return Hashes(good_hashes) - - -def _strip_postfix(req): - """ - Strip req postfix ( -dev, 0.2, etc ) - """ - # FIXME: use package_to_requirement? - match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req) - if match: - # Strip off -dev, -0.2, etc. 
-        warnings.warn(
-            "#egg cleanup for editable urls will be dropped in the future",
-            RemovedInPip11Warning,
-        )
-        req = match.group(1)
-    return req
-
 
 def parse_editable(editable_req):
     """Parses an editable requirement into:
@@ -1085,7 +1105,7 @@ def parse_editable(editable_req):
             "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
-    return _strip_postfix(package_name), url, None
+    return package_name, url, None
 
 
 def deduce_helpful_msg(req):
@@ -1113,3 +1133,10 @@ def deduce_helpful_msg(req):
     else:
         msg += " File '%s' does not exist." % (req)
     return msg
+
+
+def _is_list_of_str(obj):
+    return (
+        isinstance(obj, list) and
+        all(isinstance(item, six.string_types) for item in obj)
+    )
diff --git a/pipenv/patched/notpip/_internal/req/req_set.py b/pipenv/patched/notpip/_internal/req/req_set.py
index 2fd09f5eac..2c54c85a9f 100644
--- a/pipenv/patched/notpip/_internal/req/req_set.py
+++ b/pipenv/patched/notpip/_internal/req/req_set.py
@@ -14,9 +14,6 @@ class RequirementSet(object):
 
     def __init__(self, require_hashes=False, ignore_compatibility=True):
         """Create a RequirementSet.
-
-        :param wheel_cache: The pip wheel cache, for passing to
-            InstallRequirement.
         """
 
         self.requirements = OrderedDict()
diff --git a/pipenv/patched/notpip/_internal/req/req_tracker.py b/pipenv/patched/notpip/_internal/req/req_tracker.py
new file mode 100644
index 0000000000..6e0201fe97
--- /dev/null
+++ b/pipenv/patched/notpip/_internal/req/req_tracker.py
@@ -0,0 +1,76 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import hashlib
+import logging
+import os
+
+from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementTracker(object):
+
+    def __init__(self):
+        self._root = os.environ.get('PIP_REQ_TRACKER')
+        if self._root is None:
+            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
+            self._temp_dir.create()
+            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
+            logger.debug('Created requirements tracker %r', self._root)
+        else:
+            self._temp_dir = None
+            logger.debug('Re-using requirements tracker %r', self._root)
+        self._entries = set()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.cleanup()
+
+    def _entry_path(self, link):
+        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+        return os.path.join(self._root, hashed)
+
+    def add(self, req):
+        link = req.link
+        info = str(req)
+        entry_path = self._entry_path(link)
+        try:
+            with open(entry_path) as fp:
+                # Error, there's already a build in progress.
+ raise LookupError('%s is already being built: %s' + % (link, fp.read())) + except IOError as e: + if e.errno != errno.ENOENT: + raise + assert req not in self._entries + with open(entry_path, 'w') as fp: + fp.write(info) + self._entries.add(req) + logger.debug('Added %s to build tracker %r', req, self._root) + + def remove(self, req): + link = req.link + self._entries.remove(req) + os.unlink(self._entry_path(link)) + logger.debug('Removed %s from build tracker %r', req, self._root) + + def cleanup(self): + for req in set(self._entries): + self.remove(req) + remove = self._temp_dir is not None + if remove: + self._temp_dir.cleanup() + logger.debug('%s build tracker %r', + 'Removed' if remove else 'Cleaned', + self._root) + + @contextlib.contextmanager + def track(self, req): + self.add(req) + yield + self.remove(req) diff --git a/pipenv/patched/notpip/_internal/req/req_uninstall.py b/pipenv/patched/notpip/_internal/req/req_uninstall.py index 0104a09e89..3ccd3265b6 100644 --- a/pipenv/patched/notpip/_internal/req/req_uninstall.py +++ b/pipenv/patched/notpip/_internal/req/req_uninstall.py @@ -57,12 +57,12 @@ def unique(*args, **kw): @_unique def uninstallation_paths(dist): """ - Yield all the uninstallation paths for dist based on RECORD-without-.pyc + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] Yield paths to all the files in RECORD. For each .py file in RECORD, add - the .pyc in the same directory. + the .pyc and .pyo in the same directory. - UninstallPathSet.add() takes care of the __pycache__ .pyc. + UninstallPathSet.add() takes care of the __pycache__ .py[co]. """ r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) for row in r: @@ -73,6 +73,8 @@ def uninstallation_paths(dist): base = fn[:-3] path = os.path.join(dn, base + '.pyc') yield path + path = os.path.join(dn, base + '.pyo') + yield path def compact(paths): diff --git a/pipenv/patched/notpip/_internal/resolve.py b/pipenv/patched/notpip/_internal/resolve.py index 5b3d0c0065..461a2bb879 100644 --- a/pipenv/patched/notpip/_internal/resolve.py +++ b/pipenv/patched/notpip/_internal/resolve.py @@ -18,7 +18,6 @@ BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, UnsupportedPythonVersion, ) - from pipenv.patched.notpip._internal.req.req_install import InstallRequirement from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.misc import dist_in_usersite, ensure_dir @@ -164,7 +163,7 @@ def _check_skip_installed(self, req_to_install): if not self._is_upgrade_allowed(req_to_install): if self.upgrade_strategy == "only-if-needed": - return 'not upgraded as not directly required' + return 'already satisfied, skipping upgrade' return 'already satisfied' # Check for the possibility of an upgrade. 
For link-based
diff --git a/pipenv/patched/notpip/_internal/utils/deprecation.py b/pipenv/patched/notpip/_internal/utils/deprecation.py
index e5ab59ab75..b140ac7199 100644
--- a/pipenv/patched/notpip/_internal/utils/deprecation.py
+++ b/pipenv/patched/notpip/_internal/utils/deprecation.py
@@ -6,72 +6,84 @@
 import logging
 import warnings
 
+from pipenv.patched.notpip._vendor.packaging.version import parse
+
+from pipenv.patched.notpip import __version__ as current_version
 from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
 
 if MYPY_CHECK_RUNNING:
-    from typing import Any
+    from typing import Any, Optional  # noqa: F401
 
 
 class PipDeprecationWarning(Warning):
     pass
 
 
-class Pending(object):
-    pass
-
-
-class RemovedInPip11Warning(PipDeprecationWarning):
-    pass
-
-
-class RemovedInPip12Warning(PipDeprecationWarning, Pending):
-    pass
+_original_showwarning = None  # type: Any
 
 
 # Warnings <-> Logging Integration
-
-
-_warnings_showwarning = None  # type: Any
-
-
 def _showwarning(message, category, filename, lineno, file=None, line=None):
     if file is not None:
-        if _warnings_showwarning is not None:
-            _warnings_showwarning(
+        if _original_showwarning is not None:
+            _original_showwarning(
                 message, category, filename, lineno, file, line,
             )
+    elif issubclass(category, PipDeprecationWarning):
+        # We use a specially named logger which will handle all of the
+        # deprecation messages for pip.
+        logger = logging.getLogger("pip._internal.deprecations")
+        logger.warning(message)
     else:
-        if issubclass(category, PipDeprecationWarning):
-            # We use a specially named logger which will handle all of the
-            # deprecation messages for pip.
-            logger = logging.getLogger("pip._internal.deprecations")
-
-            # This is purposely using the % formatter here instead of letting
-            # the logging module handle the interpolation. This is because we
-            # want it to appear as if someone typed this entire message out.
-            log_message = "DEPRECATION: %s" % message
-
-            # PipDeprecationWarnings that are Pending still have at least 2
-            # versions to go until they are removed so they can just be
-            # warnings.  Otherwise, they will be removed in the very next
-            # version of pip.  We want these to be more obvious so we use the
-            # ERROR logging level.
-            if issubclass(category, Pending):
-                logger.warning(log_message)
-            else:
-                logger.error(log_message)
-        else:
-            _warnings_showwarning(
-                message, category, filename, lineno, file, line,
-            )
+        _original_showwarning(
+            message, category, filename, lineno, file, line,
+        )
 
 
 def install_warning_logger():
     # Enable our Deprecation Warnings
     warnings.simplefilter("default", PipDeprecationWarning, append=True)
 
-    global _warnings_showwarning
+    global _original_showwarning
 
-    if _warnings_showwarning is None:
-        _warnings_showwarning = warnings.showwarning
+    if _original_showwarning is None:
+        _original_showwarning = warnings.showwarning
         warnings.showwarning = _showwarning
+
+
+def deprecated(reason, replacement, gone_in, issue=None):
+    # type: (str, Optional[str], Optional[str], Optional[int]) -> None
+    """Helper to deprecate existing functionality.
+
+    reason:
+        Textual reason shown to the user about why this functionality has
+        been deprecated.
+    replacement:
+        Textual suggestion shown to the user about what alternative
+        functionality they can use.
+    gone_in:
+        The version of pip in which this functionality should be removed.
+        Raises errors if pip's current version is greater than or equal to
+        this.
+ issue: + Issue number on the tracker that would serve as a useful place for + users to find related discussion and provide feedback. + + Always pass replacement, gone_in and issue as keyword arguments for clarity + at the call site. + """ + + # Construct a nice message. + # This is purposely eagerly formatted as we want it to appear as if someone + # typed this entire message out. + message = "DEPRECATION: " + reason + if replacement is not None: + message += " A possible replacement is {}.".format(replacement) + if issue is not None: + url = "https://github.com/pypa/pip/issues/" + str(issue) + message += " You can find discussion regarding this at {}.".format(url) + + # Raise as an error if it has to be removed. + if gone_in is not None and parse(current_version) >= parse(gone_in): + raise PipDeprecationWarning(message) + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/pipenv/patched/notpip/_internal/utils/logging.py b/pipenv/patched/notpip/_internal/utils/logging.py index 5864fe1456..257a62346c 100644 --- a/pipenv/patched/notpip/_internal/utils/logging.py +++ b/pipenv/patched/notpip/_internal/utils/logging.py @@ -97,7 +97,7 @@ def should_color(self): if hasattr(real_stream, "isatty") and real_stream.isatty(): return True - # If we have an ASNI term we should color it + # If we have an ANSI term we should color it if os.environ.get("TERM") == "ANSI": return True @@ -130,3 +130,96 @@ def __init__(self, level): def filter(self, record): return record.levelno < self.level + + +def setup_logging(verbosity, no_color, user_log_file): + """Configures and sets up all of the logging + """ + + # Determine the level to be logging at. + if verbosity >= 1: + level = "DEBUG" + elif verbosity == -1: + level = "WARNING" + elif verbosity == -2: + level = "ERROR" + elif verbosity <= -3: + level = "CRITICAL" + else: + level = "INFO" + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. 
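+    # [Editor's note, illustrative only, not part of the upstream change:
+    #  with the mapping above, a hypothetical call
+    #      setup_logging(verbosity=1, no_color=True, user_log_file=None)
+    #  selects level "DEBUG", so the line below also enables DEBUG logging
+    #  for vendored libraries, while the default verbosity of 0 selects
+    #  "INFO" and leaves vendored logging at "WARNING".]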
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.ColorizedStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + + logging.config.dictConfig({ + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "delay": True, + "formatter": "indent", + }, + }, + "root": { + "level": root_level, + "handlers": ["console", "console_errors"] + ( + ["user_log"] if include_user_log else [] + ), + }, + "loggers": { + "pip._vendor": { + "level": vendored_log_level + } + }, + }) diff --git a/pipenv/patched/notpip/_internal/utils/misc.py b/pipenv/patched/notpip/_internal/utils/misc.py index 459b309ad4..e254f3d436 100644 --- a/pipenv/patched/notpip/_internal/utils/misc.py +++ b/pipenv/patched/notpip/_internal/utils/misc.py @@ -24,9 +24,12 @@ from pipenv.patched.notpip._vendor.retrying import retry # type: ignore from pipenv.patched.notpip._vendor.six import PY2 from pipenv.patched.notpip._vendor.six.moves import input +from pipenv.patched.notpip._vendor.six.moves.urllib import parse as urllib_parse -from pipenv.patched.notpip._internal.compat import console_to_str, expanduser, stdlib_pkgs -from pipenv.patched.notpip._internal.exceptions import InstallationError +from pipenv.patched.notpip._internal.compat import ( + WINDOWS, console_to_str, expanduser, stdlib_pkgs, +) +from pipenv.patched.notpip._internal.exceptions import CommandError, InstallationError from pipenv.patched.notpip._internal.locations import ( running_under_virtualenv, site_packages, user_site, virtualenv_no_global, write_delete_marker_file, @@ -47,7 +50,7 @@ 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', 'captured_stdout', 'ensure_dir', 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', - 'get_installed_version'] + 'get_installed_version', 'remove_auth_from_url'] logger = std_logging.getLogger(__name__) @@ -818,17 +821,15 @@ def __get__(self, obj, cls): return value -def get_installed_version(dist_name, lookup_dirs=None): +def get_installed_version(dist_name, working_set=None): """Get the installed version of dist_name avoiding pkg_resources cache""" # Create a requirement that we'll look for inside of setuptools. req = pkg_resources.Requirement.parse(dist_name) - # We want to avoid having this cached, so we need to construct a new - # working set each time. - if lookup_dirs is None: + if working_set is None: + # We want to avoid having this cached, so we need to construct a new + # working set each time. 
working_set = pkg_resources.WorkingSet() - else: - working_set = pkg_resources.WorkingSet(lookup_dirs) # Get the installed distribution from our working set dist = working_set.find(req) @@ -849,3 +850,50 @@ def enum(*sequential, **named): reverse = {value: key for key, value in enums.items()} enums['reverse_mapping'] = reverse return type('Enum', (), enums) + + +def remove_auth_from_url(url): + # Return a copy of url with 'username:password@' removed. + # username/pass params are passed to subversion through flags + # and are not recognized in the url. + + # parsed url + purl = urllib_parse.urlsplit(url) + stripped_netloc = \ + purl.netloc.split('@')[-1] + + # stripped url + url_pieces = ( + purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment + ) + surl = urllib_parse.urlunsplit(url_pieces) + return surl + + +def protect_pip_from_modification_on_windows(modifying_pip): + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]) + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and + WINDOWS and + os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [ + sys.executable, "-m", "pip" + ] + sys.argv[1:] + raise CommandError( + 'To modify pip, please run the following command:\n{}' + .format(" ".join(new_command)) + ) diff --git a/pipenv/patched/notpip/_internal/utils/outdated.py b/pipenv/patched/notpip/_internal/utils/outdated.py index 87911380a9..6133e6fd0a 100644 --- a/pipenv/patched/notpip/_internal/utils/outdated.py +++ b/pipenv/patched/notpip/_internal/utils/outdated.py @@ -6,12 +6,11 @@ import os.path import sys -from pipenv.patched.notpip._vendor import lockfile +from pipenv.patched.notpip._vendor import lockfile, pkg_resources from pipenv.patched.notpip._vendor.packaging import version as packaging_version from pipenv.patched.notpip._internal.compat import WINDOWS from pipenv.patched.notpip._internal.index import PackageFinder -from pipenv.patched.notpip._internal.locations import USER_CACHE_DIR, running_under_virtualenv from pipenv.patched.notpip._internal.utils.filesystem import check_path_owner from pipenv.patched.notpip._internal.utils.misc import ensure_dir, get_installed_version @@ -21,34 +20,9 @@ logger = logging.getLogger(__name__) -class VirtualenvSelfCheckState(object): - def __init__(self): - self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json") - - # Load the existing state - try: - with open(self.statefile_path) as statefile: - self.state = json.load(statefile) - except (IOError, ValueError): - self.state = {} - - def save(self, pypi_version, current_time): - # Attempt to write out our version check file - with open(self.statefile_path, "w") as statefile: - json.dump( - { - "last_check": current_time.strftime(SELFCHECK_DATE_FMT), - "pypi_version": pypi_version, - }, - statefile, - sort_keys=True, - separators=(",", ":") - ) - - -class GlobalSelfCheckState(object): - def __init__(self): - self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json") +class SelfCheckState(object): + def __init__(self, cache_dir): + self.statefile_path = os.path.join(cache_dir, "selfcheck.json") # Load the existing state try: @@ -84,11 +58,18 @@ def save(self, pypi_version, current_time): separators=(",", ":")) -def 
load_selfcheck_statefile():
-    if running_under_virtualenv():
-        return VirtualenvSelfCheckState()
-    else:
-        return GlobalSelfCheckState()
+def was_installed_by_pip(pkg):
+    """Checks whether pkg was installed by pip
+
+    This is used to avoid displaying the upgrade message when pip was in
+    fact installed by a system package manager, such as dnf on Fedora.
+    """
+    try:
+        dist = pkg_resources.get_distribution(pkg)
+        return (dist.has_metadata('INSTALLER') and
+                'pip' in dist.get_metadata_lines('INSTALLER'))
+    except pkg_resources.DistributionNotFound:
+        return False
 
 
 def pip_version_check(session, options):
@@ -106,7 +87,7 @@ def pip_version_check(session, options):
     pypi_version = None
 
     try:
-        state = load_selfcheck_statefile()
+        state = SelfCheckState(cache_dir=options.cache_dir)
         current_time = datetime.datetime.utcnow()
 
         # Determine if we need to refresh the state
@@ -143,7 +124,8 @@ def pip_version_check(session, options):
 
         # Determine if our pypi_version is older
         if (pip_version < remote_version and
-                pip_version.base_version != remote_version.base_version):
+                pip_version.base_version != remote_version.base_version and
+                was_installed_by_pip('pip')):
             # Advise "python -m pip" on Windows to avoid issues
             # with overwriting pip.exe.
             if WINDOWS:
diff --git a/pipenv/patched/notpip/_internal/utils/packaging.py b/pipenv/patched/notpip/_internal/utils/packaging.py
index eee2146919..1354774374 100644
--- a/pipenv/patched/notpip/_internal/utils/packaging.py
+++ b/pipenv/patched/notpip/_internal/utils/packaging.py
@@ -1,7 +1,6 @@
 from __future__ import absolute_import
 
 import logging
-import os
 import sys
 from email.parser import FeedParser  # type: ignore
 
@@ -51,13 +50,7 @@ def check_dist_requires_python(dist, absorb=True):
         return requires_python
     try:
         if not check_requires_python(requires_python):
-            # raise exceptions.UnsupportedPythonVersion(
-            #     "%s requires Python '%s' but the running Python is %s" % (
-            #         dist.project_name,
-            #         requires_python,
-            #         '.'.join(map(str, sys.version_info[:3])),)
-            # )
-            return
+            return requires_python
     except specifiers.InvalidSpecifier as e:
         logger.warning(
             "Package %s has an invalid Requires-Python entry %s - %s",
diff --git a/pipenv/patched/notpip/_internal/utils/typing.py b/pipenv/patched/notpip/_internal/utils/typing.py
index 170cc6c0e3..56f2fa87e2 100644
--- a/pipenv/patched/notpip/_internal/utils/typing.py
+++ b/pipenv/patched/notpip/_internal/utils/typing.py
@@ -18,10 +18,10 @@
 In pip, all static-typing related imports should be guarded as follows:
 
-    from pipenv.patched.notpip.utils.typing import MYPY_CHECK_RUNNING
+    from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
 
     if MYPY_CHECK_RUNNING:
-        from typing import ...
+        from typing import ...  # noqa: F401
 
 Ref: https://github.com/python/mypy/issues/3216
 """
diff --git a/pipenv/patched/notpip/_internal/utils/ui.py b/pipenv/patched/notpip/_internal/utils/ui.py
index 2b692a811a..b96863cc44 100644
--- a/pipenv/patched/notpip/_internal/utils/ui.py
+++ b/pipenv/patched/notpip/_internal/utils/ui.py
@@ -21,7 +21,7 @@
 from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
 
 if MYPY_CHECK_RUNNING:
-    from typing import Any
+    from typing import Any  # noqa: F401
 
 try:
     from pipenv.patched.notpip._vendor import colorama
diff --git a/pipenv/patched/notpip/_internal/vcs/__init__.py b/pipenv/patched/notpip/_internal/vcs/__init__.py
index f115de4c97..146f2829d3 100644
--- a/pipenv/patched/notpip/_internal/vcs/__init__.py
+++ b/pipenv/patched/notpip/_internal/vcs/__init__.py
@@ -1,7 +1,6 @@
 """Handles all VCS (version control) support"""
 from __future__ import absolute_import
 
-import copy
 import errno
 import logging
 import os
@@ -17,8 +16,8 @@
 from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
 
 if MYPY_CHECK_RUNNING:
-    from typing import Dict, Optional, Tuple
-    from pipenv.patched.notpip._internal.basecommand import Command
+    from typing import Dict, Optional, Tuple  # noqa: F401
+    from pipenv.patched.notpip._internal.basecommand import Command  # noqa: F401
 
 __all__ = ['vcs', 'get_src_requirement']
 
@@ -214,7 +213,7 @@ def export(self, location):
         """
         raise NotImplementedError
 
-    def get_url_rev(self):
+    def get_url_rev(self, url):
         """
         Returns the correct repository URL and revision by parsing the
         given repository URL
@@ -224,8 +223,8 @@
             "The format is <vcs>+<protocol>://<url>, "
             "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
         )
-        assert '+' in self.url, error_message % self.url
-        url = self.url.split('+', 1)[1]
+        assert '+' in url, error_message % url
+        url = url.split('+', 1)[1]
         scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
         rev = None
         if '@' in path:
@@ -233,6 +232,24 @@
             url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
         return url, rev
 
+    def get_url_rev_args(self, url):
+        """
+        Return the URL and RevOptions "extra arguments" to use in obtain(),
+        as a tuple (url, extra_args).
+        """
+        return url, []
+
+    def get_url_rev_options(self, url):
+        """
+        Return the URL and RevOptions object to use in obtain() and in
+        some cases export(), as a tuple (url, rev_options).
+        """
+        url, rev = self.get_url_rev(url)
+        url, extra_args = self.get_url_rev_args(url)
+        rev_options = self.make_rev_options(rev, extra_args=extra_args)
+
+        return url, rev_options
+
     def get_info(self, location):
         """
         Returns (url, revision), where both are strings
@@ -254,10 +271,14 @@ def compare_urls(self, url1, url2):
         """
         return (self.normalize_url(url1) == self.normalize_url(url2))
 
-    def obtain(self, dest):
+    def fetch_new(self, dest, url, rev_options):
         """
-        Called when installing or updating an editable package, takes the
-        source path of the checkout.
+        Fetch a revision from a repository, in the case that this is the
+        first fetch from the repository.
+
+        Args:
+          dest: the directory to fetch the repository to.
+          rev_options: a RevOptions object.
         """
         raise NotImplementedError
 
@@ -289,94 +310,95 @@ def is_commit_id_equal(self, dest, name):
         """
         raise NotImplementedError
 
-    def check_destination(self, dest, url, rev_options):
+    def obtain(self, dest):
         """
-        Prepare a location to receive a checkout/clone.
-
-        Return True if the location is ready for (and requires) a
-        checkout/clone, False otherwise.
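[Editor's aside, illustrative only and not part of the patch: the consolidated obtain() defined below reduces to this sketch, using only methods defined in this file and eliding the interactive prompt:

    url, rev_options = self.get_url_rev_options(self.url)
    if not os.path.exists(dest):
        self.fetch_new(dest, url, rev_options)
    elif (self.is_repository_directory(dest) and
            self.compare_urls(self.get_url(dest), url)):
        if not self.is_commit_id_equal(dest, rev_options.rev):
            self.update(dest, rev_options)
    else:
        pass  # prompt the user to (s)witch, (i)gnore, (w)ipe, or (b)ackup
]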
+ Install or update in editable mode the package represented by this + VersionControl object. Args: - rev_options: a RevOptions object. + dest: the repository directory in which to install or update. """ - checkout = True - prompt = False + url, rev_options = self.get_url_rev_options(self.url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options) + return + rev_display = rev_options.to_display() - if os.path.exists(dest): - checkout = False - if os.path.exists(os.path.join(dest, self.dirname)): - existing_url = self.get_url(dest) - if self.compare_urls(existing_url, url): - logger.debug( - '%s in %s exists, and has correct URL (%s)', - self.repo_name.title(), + if self.is_repository_directory(dest): + existing_url = self.get_url(dest) + if self.compare_urls(existing_url, url): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + 'Updating %s %s%s', display_path(dest), - url, - ) - if not self.is_commit_id_equal(dest, rev_options.rev): - logger.info( - 'Updating %s %s%s', - display_path(dest), - self.repo_name, - rev_display, - ) - self.update(dest, rev_options) - else: - logger.info( - 'Skipping because already up-to-date.') - else: - logger.warning( - '%s %s in %s exists with URL %s', - self.name, self.repo_name, - display_path(dest), - existing_url, + rev_display, ) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', - ('s', 'i', 'w', 'b')) - else: - logger.warning( - 'Directory %s already exists, and is not a %s %s.', - dest, - self.name, - self.repo_name, - ) - prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) - if prompt: + self.update(dest, rev_options) + else: + logger.info('Skipping because already up-to-date.') + return + logger.warning( - 'The plan is to install the %s repository %s', + '%s %s in %s exists with URL %s', self.name, - url, + self.repo_name, + display_path(dest), + existing_url, ) - response = ask_path_exists('What to do? %s' % prompt[0], - prompt[1]) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) - if response == 's': - logger.info( - 'Switching %s %s to %s%s', - self.repo_name, - display_path(dest), - url, - rev_display, - ) - self.switch(dest, url, rev_options) - elif response == 'i': - # do nothing - pass - elif response == 'w': - logger.warning('Deleting %s', display_path(dest)) - rmtree(dest) - checkout = True - elif response == 'b': - dest_dir = backup_dir(dest) - logger.warning( - 'Backing up %s to %s', display_path(dest), dest_dir, - ) - shutil.move(dest, dest_dir) - checkout = True - elif response == 'a': - sys.exit(-1) - return checkout + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) + + if response == 'a': + sys.exit(-1) + + if response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options) + return + + if response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options) + return + + # Do nothing if the response is "i". 
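+        # [Editor's note, illustrative only: the 'a', 'w', and 'b' branches
+        #  above have already exited or returned, so at this point
+        #  `response` can only be 's' or 'i'.]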
+ if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) def unpack(self, location): """ @@ -399,7 +421,8 @@ def get_src_requirement(self, dist, location): def get_url(self, location): """ Return the url used at location - Used in get_info or check_destination + + This is used in get_info() and obtain(). """ raise NotImplementedError @@ -436,17 +459,26 @@ def run_command(self, cmd, show_stdout=True, cwd=None, else: raise # re-raise exception if a different error occurred + @classmethod + def is_repository_directory(cls, path): + """ + Return whether a directory path is a repository directory. + """ + logger.debug('Checking in %s for %s (%s)...', + path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + @classmethod def controls_location(cls, location): """ Check if a location is controlled by the vcs. It is meant to be overridden to implement smarter detection mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For example, + the Git override checks that Git is actually available. """ - logger.debug('Checking in %s for %s (%s)...', - location, cls.dirname, cls.name) - path = os.path.join(location, cls.dirname) - return os.path.exists(path) + return cls.is_repository_directory(location) def get_src_requirement(dist, location): diff --git a/pipenv/patched/notpip/_internal/vcs/bazaar.py b/pipenv/patched/notpip/_internal/vcs/bazaar.py index f9a36a93bb..b2664cd842 100644 --- a/pipenv/patched/notpip/_internal/vcs/bazaar.py +++ b/pipenv/patched/notpip/_internal/vcs/bazaar.py @@ -48,6 +48,17 @@ def export(self, location): cwd=temp_dir.path, show_stdout=False, ) + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] + self.run_command(cmd_args) + def switch(self, dest, url, rev_options): self.run_command(['switch', url], cwd=dest) @@ -55,23 +66,9 @@ def update(self, dest, rev_options): cmd_args = ['pull', '-q'] + rev_options.to_args() self.run_command(cmd_args, cwd=dest) - def obtain(self, dest): - url, rev = self.get_url_rev() - rev_options = self.make_rev_options(rev) - if self.check_destination(dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Checking out %s%s to %s', - url, - rev_display, - display_path(dest), - ) - cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) - - def get_url_rev(self): + def get_url_rev(self, url): # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it - url, rev = super(Bazaar, self).get_url_rev() + url, rev = super(Bazaar, self).get_url_rev(url) if url.startswith('ssh://'): url = 'bzr+' + url return url, rev diff --git a/pipenv/patched/notpip/_internal/vcs/git.py b/pipenv/patched/notpip/_internal/vcs/git.py index dd862bfe58..ef2dd908b0 100644 --- a/pipenv/patched/notpip/_internal/vcs/git.py +++ b/pipenv/patched/notpip/_internal/vcs/git.py @@ -43,7 +43,7 @@ class Git(VersionControl): def __init__(self, url=None, *args, **kwargs): # Works around an apparent Git bug - # (see http://article.gmane.org/gmane.comp.version-control.git/146500) + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) if url: scheme, netloc, path, query, fragment = urlsplit(url) if scheme.endswith('file'): @@ -155,6 
+155,33 @@ def is_commit_id_equal(self, dest, name): return self.get_revision(dest) == name + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Cloning %s%s to %s', url, rev_display, display_path(dest), + ) + self.run_command(['clone', '-q', url, dest]) + + if rev_options.rev: + # Then a specific revision was requested. + rev_options = self.check_rev_options(dest, rev_options) + # Only do a checkout if the current commit id doesn't match + # the requested revision. + if not self.is_commit_id_equal(dest, rev_options.rev): + rev = rev_options.rev + # Only fetch the revision if it's a ref + if rev.startswith('refs/'): + self.run_command( + ['fetch', '-q', url] + rev_options.to_args(), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + rev = 'FETCH_HEAD' + self.run_command(['checkout', '-q', rev], cwd=dest) + + #: repo may contain submodules + self.update_submodules(dest) + def switch(self, dest, url, rev_options): self.run_command(['config', 'remote.origin.url', url], cwd=dest) cmd_args = ['checkout', '-q'] + rev_options.to_args() @@ -176,35 +203,6 @@ def update(self, dest, rev_options): #: update submodules self.update_submodules(dest) - def obtain(self, dest): - url, rev = self.get_url_rev() - rev_options = self.make_rev_options(rev) - if self.check_destination(dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Cloning %s%s to %s', url, rev_display, display_path(dest), - ) - self.run_command(['clone', '-q', url, dest]) - - if rev: - rev_options = self.check_rev_options(dest, rev_options) - # Only do a checkout if the current commit id doesn't match - # the requested revision. - if not self.is_commit_id_equal(dest, rev_options.rev): - rev = rev_options.rev - # Only fetch the revision if it's a ref - if rev.startswith('refs/'): - self.run_command( - ['fetch', '-q', url] + rev_options.to_args(), - cwd=dest, - ) - # Change the revision to the SHA of the ref we fetched - rev = 'FETCH_HEAD' - self.run_command(['checkout', '-q', rev], cwd=dest) - - #: repo may contain submodules - self.update_submodules(dest) - def get_url(self, location): """Return URL of the first remote encountered.""" remotes = self.run_command( @@ -267,20 +265,20 @@ def get_src_requirement(self, dist, location): req += '&subdirectory=' + subdirectory return req - def get_url_rev(self): + def get_url_rev(self, url): """ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. - That's required because although they use SSH they sometimes doesn't - work with a ssh:// scheme (e.g. Github). But we need a scheme for + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for parsing. Hence we remove it again afterwards and return it as a stub. 
""" - if '://' not in self.url: - assert 'file:' not in self.url - self.url = self.url.replace('git+', 'git+ssh://') - url, rev = super(Git, self).get_url_rev() + if '://' not in url: + assert 'file:' not in url + url = url.replace('git+', 'git+ssh://') + url, rev = super(Git, self).get_url_rev(url) url = url.replace('ssh://', '') else: - url, rev = super(Git, self).get_url_rev() + url, rev = super(Git, self).get_url_rev(url) return url, rev diff --git a/pipenv/patched/notpip/_internal/vcs/mercurial.py b/pipenv/patched/notpip/_internal/vcs/mercurial.py index e28228fc0a..a143e765f9 100644 --- a/pipenv/patched/notpip/_internal/vcs/mercurial.py +++ b/pipenv/patched/notpip/_internal/vcs/mercurial.py @@ -31,6 +31,18 @@ def export(self, location): ['archive', location], show_stdout=False, cwd=temp_dir.path ) + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Cloning hg %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['clone', '--noupdate', '-q', url, dest]) + cmd_args = ['update', '-q'] + rev_options.to_args() + self.run_command(cmd_args, cwd=dest) + def switch(self, dest, url, rev_options): repo_config = os.path.join(dest, self.dirname, 'hgrc') config = configparser.SafeConfigParser() @@ -52,21 +64,6 @@ def update(self, dest, rev_options): cmd_args = ['update', '-q'] + rev_options.to_args() self.run_command(cmd_args, cwd=dest) - def obtain(self, dest): - url, rev = self.get_url_rev() - rev_options = self.make_rev_options(rev) - if self.check_destination(dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Cloning hg %s%s to %s', - url, - rev_display, - display_path(dest), - ) - self.run_command(['clone', '--noupdate', '-q', url, dest]) - cmd_args = ['update', '-q'] + rev_options.to_args() - self.run_command(cmd_args, cwd=dest) - def get_url(self, location): url = self.run_command( ['showconfig', 'paths.default'], diff --git a/pipenv/patched/notpip/_internal/vcs/subversion.py b/pipenv/patched/notpip/_internal/vcs/subversion.py index 6f480ae340..5adbdaa33b 100644 --- a/pipenv/patched/notpip/_internal/vcs/subversion.py +++ b/pipenv/patched/notpip/_internal/vcs/subversion.py @@ -8,7 +8,7 @@ from pipenv.patched.notpip._internal.index import Link from pipenv.patched.notpip._internal.utils.logging import indent_log -from pipenv.patched.notpip._internal.utils.misc import display_path, rmtree +from pipenv.patched.notpip._internal.utils.misc import display_path, remove_auth_from_url, rmtree from pipenv.patched.notpip._internal.vcs import VersionControl, vcs _svn_xml_url_re = re.compile('url="([^"]+)"') @@ -61,9 +61,8 @@ def get_info(self, location): def export(self, location): """Export the svn repository at the url to the destination location""" - url, rev = self.get_url_rev() - rev_options = get_rev_options(self, url, rev) - url = self.remove_auth_from_url(url) + url, rev_options = self.get_url_rev_options(self.url) + logger.info('Exporting svn repository %s to %s', url, location) with indent_log(): if os.path.exists(location): @@ -73,6 +72,17 @@ def export(self, location): cmd_args = ['export'] + rev_options.to_args() + [url, location] self.run_command(cmd_args, show_stdout=False) + def fetch_new(self, dest, url, rev_options): + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] + self.run_command(cmd_args) + def switch(self, dest, url, 
rev_options): cmd_args = ['switch'] + rev_options.to_args() + [url, dest] self.run_command(cmd_args) @@ -81,21 +91,6 @@ def update(self, dest, rev_options): cmd_args = ['update'] + rev_options.to_args() + [dest] self.run_command(cmd_args) - def obtain(self, dest): - url, rev = self.get_url_rev() - rev_options = get_rev_options(self, url, rev) - url = self.remove_auth_from_url(url) - if self.check_destination(dest, url, rev_options): - rev_display = rev_options.to_display() - logger.info( - 'Checking out %s%s to %s', - url, - rev_display, - display_path(dest), - ) - cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] - self.run_command(cmd_args) - def get_location(self, dist, dependency_links): for url in dependency_links: egg_fragment = Link(url).egg_fragment @@ -137,13 +132,19 @@ def get_revision(self, location): revision = max(revision, localrev) return revision - def get_url_rev(self): + def get_url_rev(self, url): # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it - url, rev = super(Subversion, self).get_url_rev() + url, rev = super(Subversion, self).get_url_rev(url) if url.startswith('ssh://'): url = 'svn+' + url return url, rev + def get_url_rev_args(self, url): + extra_args = get_rev_options_args(url) + url = remove_auth_from_url(url) + + return url, extra_args + def get_url(self, location): # In cases where the source is in a subdirectory, not alongside # setup.py we have to look up in the location until we find a real @@ -221,28 +222,10 @@ def is_commit_id_equal(self, dest, name): """Always assume the versions don't match""" return False - @staticmethod - def remove_auth_from_url(url): - # Return a copy of url with 'username:password@' removed. - # username/pass params are passed to subversion through flags - # and are not recognized in the url. - - # parsed url - purl = urllib_parse.urlsplit(url) - stripped_netloc = \ - purl.netloc.split('@')[-1] - - # stripped url - url_pieces = ( - purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment - ) - surl = urllib_parse.urlunsplit(url_pieces) - return surl - -def get_rev_options(vcs, url, rev): +def get_rev_options_args(url): """ - Return a RevOptions object. + Return the extra arguments to pass to RevOptions. 
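
[annotation] For Subversion the same refactor splits credential handling out of obtain(): get_rev_options_args() turns URL-embedded credentials into --username/--password flags, and remove_auth_from_url() strips them from the URL itself, because svn does not accept them inline. A minimal sketch of the two steps combined; split_svn_auth is a hypothetical name, the real code keeps these as separate helpers:

    from urllib.parse import urlsplit, urlunsplit

    def split_svn_auth(url):
        """Turn URL credentials into svn CLI flags and a clean URL."""
        parts = urlsplit(url)
        extra_args = []
        if parts.username:
            extra_args += ['--username', parts.username]
        if parts.password:
            extra_args += ['--password', parts.password]
        # svn does not understand 'user:pw@' in the URL itself, so strip it.
        netloc = parts.netloc.rsplit('@', 1)[-1]
        clean = urlunsplit((parts.scheme, netloc, parts.path,
                            parts.query, parts.fragment))
        return clean, extra_args

    print(split_svn_auth('svn+https://alice:s3cret@svn.example.com/trunk'))
    # ('svn+https://svn.example.com/trunk',
    #  ['--username', 'alice', '--password', 's3cret'])
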
""" r = urllib_parse.urlsplit(url) if hasattr(r, 'username'): @@ -265,7 +248,7 @@ def get_rev_options(vcs, url, rev): if password: extra_args += ['--password', password] - return vcs.make_rev_options(rev, extra_args=extra_args) + return extra_args vcs.register(Subversion) diff --git a/pipenv/patched/notpip/_internal/wheel.py b/pipenv/patched/notpip/_internal/wheel.py index fe3bf7d239..14ec00143c 100644 --- a/pipenv/patched/notpip/_internal/wheel.py +++ b/pipenv/patched/notpip/_internal/wheel.py @@ -5,7 +5,6 @@ import collections import compileall -import copy import csv import hashlib import logging @@ -24,7 +23,6 @@ from pipenv.patched.notpip._vendor.six import StringIO from pipenv.patched.notpip._internal import pep425tags -from pipenv.patched.notpip._internal.build_env import BuildEnvironment from pipenv.patched.notpip._internal.download import path_to_url, unpack_url from pipenv.patched.notpip._internal.exceptions import ( InstallationError, InvalidWheelFilename, UnsupportedWheel, @@ -42,7 +40,7 @@ from pipenv.patched.notpip._internal.utils.ui import open_spinner if MYPY_CHECK_RUNNING: - from typing import Dict, List, Optional + from typing import Dict, List, Optional # noqa: F401 wheel_ext = '.whl' @@ -52,9 +50,9 @@ logger = logging.getLogger(__name__) -def rehash(path, algo='sha256', blocksize=1 << 20): - """Return (hash, length) for path using hashlib.new(algo)""" - h = hashlib.new(algo) +def rehash(path, blocksize=1 << 20): + """Return (hash, length) for path using hashlib.sha256()""" + h = hashlib.sha256() length = 0 with open(path, 'rb') as f: for block in read_chunks(f, size=blocksize): @@ -164,7 +162,8 @@ def message_about_scripts_not_on_PATH(scripts): # We don't want to warn for directories that are on PATH. not_warn_dirs = [ - os.path.normcase(i) for i in os.environ["PATH"].split(os.pathsep) + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) ] # If an executable sits with sys.executable, we don't warn for it. # This covers the case of venv invocations without activating the venv. @@ -284,6 +283,17 @@ def clobber(source, dest, is_base, fixer=None, filter=None): # uninstalled. ensure_dir(destdir) + # copyfile (called below) truncates the destination if it + # exists and then writes the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. 
+ if os.path.exists(destfile): + os.unlink(destfile) + # We use copyfile (not move, copy, or copy2) to be extra sure # that we are not moving directories over (copyfile fails for # directories) as well as to ensure that we are not copying @@ -496,8 +506,8 @@ def _get_script_text(entry): row[1], row[2] = rehash(row[0]) writer.writerow(row) for f in generated: - h, l = rehash(f) - writer.writerow((normpath(f, lib_dir), h, l)) + digest, length = rehash(f) + writer.writerow((normpath(f, lib_dir), digest, length)) for f in installed: writer.writerow((installed[f], '', '')) shutil.move(temp_record, record) @@ -518,7 +528,7 @@ def wheel_version(source_dir): version = wheel_data['Wheel-Version'].strip() version = tuple(map(int, version.split('.'))) return version - except: + except Exception: return False @@ -643,7 +653,7 @@ def _build_one_inside_env(self, req, output_dir, python_tag=None): ) logger.info('Stored in directory: %s', output_dir) return wheel_path - except: + except Exception: pass # Ignore return, we can't do anything else useful. self._clean_one(req) @@ -675,7 +685,7 @@ def __build_one(self, req, tempd, python_tag=None): call_subprocess(wheel_args, cwd=req.setup_py_dir, show_stdout=False, spinner=spinner) return True - except: + except Exception: spinner.finish("error") logger.error('Failed building wheel for %s', req.name) return False @@ -688,7 +698,7 @@ def _clean_one(self, req): try: call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False) return True - except: + except Exception: logger.error('Failed cleaning build dir for %s', req.name) return False diff --git a/pipenv/patched/notpip/_vendor/__init__.py b/pipenv/patched/notpip/_vendor/__init__.py index 40ce7a0114..b6294b2140 100644 --- a/pipenv/patched/notpip/_vendor/__init__.py +++ b/pipenv/patched/notpip/_vendor/__init__.py @@ -107,5 +107,4 @@ def vendored(modulename): vendored("requests.packages.urllib3.util.ssl_") vendored("requests.packages.urllib3.util.timeout") vendored("requests.packages.urllib3.util.url") - -import requests + vendored("urllib3") diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py b/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py index f386d4929c..8fdee66ffe 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py @@ -2,9 +2,9 @@ Make it easy to import from cachecontrol without long namespaces. 
""" -__author__ = 'Eric Larson' -__email__ = 'eric@ionrock.org' -__version__ = '0.12.4' +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.12.5" from .wrapper import CacheControl from .adapter import CacheControlAdapter diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/_cmd.py b/pipenv/patched/notpip/_vendor/cachecontrol/_cmd.py index b5e44db81e..fb90adf438 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/_cmd.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/_cmd.py @@ -17,14 +17,11 @@ def setup_logging(): def get_session(): adapter = CacheControlAdapter( - DictCache(), - cache_etags=True, - serializer=None, - heuristic=None, + DictCache(), cache_etags=True, serializer=None, heuristic=None ) sess = requests.Session() - sess.mount('http://', adapter) - sess.mount('https://', adapter) + sess.mount("http://", adapter) + sess.mount("https://", adapter) sess.cache_controller = adapter.controller return sess @@ -32,7 +29,7 @@ def get_session(): def get_args(): parser = ArgumentParser() - parser.add_argument('url', help='The URL to try and cache') + parser.add_argument("url", help="The URL to try and cache") return parser.parse_args() @@ -51,10 +48,10 @@ def main(args=None): # Now try to get it if sess.cache_controller.cached_request(resp.request): - print('Cached!') + print("Cached!") else: - print('Not cached :(') + print("Not cached :(") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py b/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py index bcf107ee0d..2f2909988d 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/adapter.py @@ -10,25 +10,27 @@ class CacheControlAdapter(HTTPAdapter): - invalidating_methods = set(['PUT', 'DELETE']) - - def __init__(self, cache=None, - cache_etags=True, - controller_class=None, - serializer=None, - heuristic=None, - cacheable_methods=None, - *args, **kw): + invalidating_methods = {"PUT", "DELETE"} + + def __init__( + self, + cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + cacheable_methods=None, + *args, + **kw + ): super(CacheControlAdapter, self).__init__(*args, **kw) self.cache = cache or DictCache() self.heuristic = heuristic - self.cacheable_methods = cacheable_methods or ('GET',) + self.cacheable_methods = cacheable_methods or ("GET",) controller_factory = controller_class or CacheController self.controller = controller_factory( - self.cache, - cache_etags=cache_etags, - serializer=serializer, + self.cache, cache_etags=cache_etags, serializer=serializer ) def send(self, request, cacheable_methods=None, **kw): @@ -43,20 +45,18 @@ def send(self, request, cacheable_methods=None, **kw): except zlib.error: cached_response = None if cached_response: - return self.build_response(request, cached_response, - from_cache=True) + return self.build_response(request, cached_response, from_cache=True) # check for etags and add headers if appropriate - request.headers.update( - self.controller.conditional_headers(request) - ) + request.headers.update(self.controller.conditional_headers(request)) resp = super(CacheControlAdapter, self).send(request, **kw) return resp - def build_response(self, request, response, from_cache=False, - cacheable_methods=None): + def build_response( + self, request, response, from_cache=False, cacheable_methods=None + ): """ Build a response by making a request or using the cache. 
@@ -101,10 +101,8 @@ def build_response(self, request, response, from_cache=False, response._fp = CallbackFileWrapper( response._fp, functools.partial( - self.controller.cache_response, - request, - response, - ) + self.controller.cache_response, request, response + ), ) if response.chunked: super_update_chunk_length = response._update_chunk_length @@ -113,11 +111,12 @@ def _update_chunk_length(self): super_update_chunk_length() if self.chunk_left == 0: self._fp._close() - response._update_chunk_length = types.MethodType(_update_chunk_length, response) - resp = super(CacheControlAdapter, self).build_response( - request, response - ) + response._update_chunk_length = types.MethodType( + _update_chunk_length, response + ) + + resp = super(CacheControlAdapter, self).build_response(request, response) # See if we should invalidate the cache. if request.method in self.invalidating_methods and resp.ok: diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/cache.py b/pipenv/patched/notpip/_vendor/cachecontrol/cache.py index 7389a73f8c..94e07732d9 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/cache.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/cache.py @@ -8,13 +8,13 @@ class BaseCache(object): def get(self, key): - raise NotImplemented() + raise NotImplementedError() def set(self, key, value): - raise NotImplemented() + raise NotImplementedError() def delete(self, key): - raise NotImplemented() + raise NotImplementedError() def close(self): pass diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py b/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py index cbe75851e4..06f7d09ebb 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py @@ -9,7 +9,7 @@ FileNotFoundError except NameError: # py2.X - FileNotFoundError = OSError + FileNotFoundError = (IOError, OSError) def _secure_open_write(filename, fmode): @@ -46,6 +46,7 @@ def _secure_open_write(filename, fmode): fd = os.open(filename, flags, fmode) try: return os.fdopen(fd, "wb") + except: # An error occurred wrapping our FD in a file object os.close(fd) @@ -53,8 +54,16 @@ def _secure_open_write(filename, fmode): class FileCache(BaseCache): - def __init__(self, directory, forever=False, filemode=0o0600, - dirmode=0o0700, use_dir_lock=None, lock_class=None): + + def __init__( + self, + directory, + forever=False, + filemode=0o0600, + dirmode=0o0700, + use_dir_lock=None, + lock_class=None, + ): if use_dir_lock is not None and lock_class is not None: raise ValueError("Cannot use use_dir_lock and lock_class together") @@ -63,12 +72,15 @@ def __init__(self, directory, forever=False, filemode=0o0600, from pipenv.patched.notpip._vendor.lockfile import LockFile from pipenv.patched.notpip._vendor.lockfile.mkdirlockfile import MkdirLockFile except ImportError: - notice = dedent(""" + notice = dedent( + """ NOTE: In order to use the FileCache you must have lockfile installed. 
You can install it via pip: pip install lockfile - """) + """ + ) raise ImportError(notice) + else: if use_dir_lock: lock_class = MkdirLockFile @@ -95,11 +107,12 @@ def _fn(self, name): def get(self, key): name = self._fn(key) - if not os.path.exists(name): - return None + try: + with open(name, "rb") as fh: + return fh.read() - with open(name, 'rb') as fh: - return fh.read() + except FileNotFoundError: + return None def set(self, key, value): name = self._fn(key) diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py b/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py index c0458856de..0cc5e97998 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py @@ -4,16 +4,6 @@ from pipenv.patched.notpip._vendor.cachecontrol.cache import BaseCache -def total_seconds(td): - """Python 2.6 compatability""" - if hasattr(td, 'total_seconds'): - return int(td.total_seconds()) - - ms = td.microseconds - secs = (td.seconds + td.days * 24 * 3600) - return int((ms + secs * 10**6) / 10**6) - - class RedisCache(BaseCache): def __init__(self, conn): @@ -27,7 +17,7 @@ def set(self, key, value, expires=None): self.conn.set(key, value) else: expires = expires - datetime.utcnow() - self.conn.setex(key, total_seconds(expires), value) + self.conn.setex(key, int(expires.total_seconds()), value) def delete(self, key): self.conn.delete(key) diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/controller.py b/pipenv/patched/notpip/_vendor/cachecontrol/controller.py index 1621813c65..0448910fbc 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/controller.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/controller.py @@ -30,8 +30,10 @@ def parse_uri(uri): class CacheController(object): """An interface to see if request should cached or not. 
""" - def __init__(self, cache=None, cache_etags=True, serializer=None, - status_codes=None): + + def __init__( + self, cache=None, cache_etags=True, serializer=None, status_codes=None + ): self.cache = cache or DictCache() self.cache_etags = cache_etags self.serializer = serializer or Serializer() @@ -64,34 +66,35 @@ def cache_url(cls, uri): def parse_cache_control(self, headers): known_directives = { # https://tools.ietf.org/html/rfc7234#section-5.2 - 'max-age': (int, True,), - 'max-stale': (int, False,), - 'min-fresh': (int, True,), - 'no-cache': (None, False,), - 'no-store': (None, False,), - 'no-transform': (None, False,), - 'only-if-cached' : (None, False,), - 'must-revalidate': (None, False,), - 'public': (None, False,), - 'private': (None, False,), - 'proxy-revalidate': (None, False,), - 's-maxage': (int, True,) + "max-age": (int, True), + "max-stale": (int, False), + "min-fresh": (int, True), + "no-cache": (None, False), + "no-store": (None, False), + "no-transform": (None, False), + "only-if-cached": (None, False), + "must-revalidate": (None, False), + "public": (None, False), + "private": (None, False), + "proxy-revalidate": (None, False), + "s-maxage": (int, True), } - cc_headers = headers.get('cache-control', - headers.get('Cache-Control', '')) + cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) retval = {} - for cc_directive in cc_headers.split(','): - parts = cc_directive.split('=', 1) + for cc_directive in cc_headers.split(","): + if not cc_directive.strip(): + continue + + parts = cc_directive.split("=", 1) directive = parts[0].strip() try: typ, required = known_directives[directive] except KeyError: - logger.debug('Ignoring unknown cache-control directive: %s', - directive) + logger.debug("Ignoring unknown cache-control directive: %s", directive) continue if not typ or not required: @@ -101,11 +104,16 @@ def parse_cache_control(self, headers): retval[directive] = typ(parts[1].strip()) except IndexError: if required: - logger.debug('Missing value for cache-control ' - 'directive: %s', directive) + logger.debug( + "Missing value for cache-control " "directive: %s", + directive, + ) except ValueError: - logger.debug('Invalid value for cache-control directive ' - '%s, must be %s', directive, typ.__name__) + logger.debug( + "Invalid value for cache-control directive " "%s, must be %s", + directive, + typ.__name__, + ) return retval @@ -119,24 +127,24 @@ def cached_request(self, request): cc = self.parse_cache_control(request.headers) # Bail out if the request insists on fresh data - if 'no-cache' in cc: + if "no-cache" in cc: logger.debug('Request header has "no-cache", cache bypassed') return False - if 'max-age' in cc and cc['max-age'] == 0: + if "max-age" in cc and cc["max-age"] == 0: logger.debug('Request header has "max_age" as 0, cache bypassed') return False # Request allows serving from the cache, let's see if we find something cache_data = self.cache.get(cache_url) if cache_data is None: - logger.debug('No cache entry available') + logger.debug("No cache entry available") return False # Check whether it can be deserialized resp = self.serializer.loads(request, cache_data) if not resp: - logger.warning('Cache entry deserialization failed, entry ignored') + logger.warning("Cache entry deserialization failed, entry ignored") return False # If we have a cached 301, return it immediately. 
We don't @@ -148,27 +156,27 @@ def cached_request(self, request): # Client can try to refresh the value by repeating the request # with cache busting headers as usual (ie no-cache). if resp.status == 301: - msg = ('Returning cached "301 Moved Permanently" response ' - '(ignoring date and etag information)') + msg = ( + 'Returning cached "301 Moved Permanently" response ' + "(ignoring date and etag information)" + ) logger.debug(msg) return resp headers = CaseInsensitiveDict(resp.headers) - if not headers or 'date' not in headers: - if 'etag' not in headers: + if not headers or "date" not in headers: + if "etag" not in headers: # Without date or etag, the cached response can never be used # and should be deleted. - logger.debug('Purging cached response: no date or etag') + logger.debug("Purging cached response: no date or etag") self.cache.delete(cache_url) - logger.debug('Ignoring cached response: no date') + logger.debug("Ignoring cached response: no date") return False now = time.time() - date = calendar.timegm( - parsedate_tz(headers['date']) - ) + date = calendar.timegm(parsedate_tz(headers["date"])) current_age = max(0, now - date) - logger.debug('Current age based on date: %i', current_age) + logger.debug("Current age based on date: %i", current_age) # TODO: There is an assumption that the result will be a # urllib3 response object. This may not be best since we @@ -180,45 +188,41 @@ def cached_request(self, request): freshness_lifetime = 0 # Check the max-age pragma in the cache control header - if 'max-age' in resp_cc: - freshness_lifetime = resp_cc['max-age'] - logger.debug('Freshness lifetime from max-age: %i', - freshness_lifetime) + if "max-age" in resp_cc: + freshness_lifetime = resp_cc["max-age"] + logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) # If there isn't a max-age, check for an expires header - elif 'expires' in headers: - expires = parsedate_tz(headers['expires']) + elif "expires" in headers: + expires = parsedate_tz(headers["expires"]) if expires is not None: expire_time = calendar.timegm(expires) - date freshness_lifetime = max(0, expire_time) - logger.debug("Freshness lifetime from expires: %i", - freshness_lifetime) + logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) # Determine if we are setting freshness limit in the # request. Note, this overrides what was in the response. - if 'max-age' in cc: - freshness_lifetime = cc['max-age'] - logger.debug('Freshness lifetime from request max-age: %i', - freshness_lifetime) + if "max-age" in cc: + freshness_lifetime = cc["max-age"] + logger.debug( + "Freshness lifetime from request max-age: %i", freshness_lifetime + ) - if 'min-fresh' in cc: - min_fresh = cc['min-fresh'] + if "min-fresh" in cc: + min_fresh = cc["min-fresh"] # adjust our current age by our min fresh current_age += min_fresh - logger.debug('Adjusted current age from min-fresh: %i', - current_age) + logger.debug("Adjusted current age from min-fresh: %i", current_age) # Return entry if it is fresh enough if freshness_lifetime > current_age: logger.debug('The response is "fresh", returning cached response') - logger.debug('%i > %i', freshness_lifetime, current_age) + logger.debug("%i > %i", freshness_lifetime, current_age) return resp # we're not fresh. 
If we don't have an Etag, clear it out - if 'etag' not in headers: - logger.debug( - 'The cached response is "stale" with no etag, purging' - ) + if "etag" not in headers: + logger.debug('The cached response is "stale" with no etag, purging') self.cache.delete(cache_url) # return the original handler @@ -232,16 +236,15 @@ def conditional_headers(self, request): if resp: headers = CaseInsensitiveDict(resp.headers) - if 'etag' in headers: - new_headers['If-None-Match'] = headers['ETag'] + if "etag" in headers: + new_headers["If-None-Match"] = headers["ETag"] - if 'last-modified' in headers: - new_headers['If-Modified-Since'] = headers['Last-Modified'] + if "last-modified" in headers: + new_headers["If-Modified-Since"] = headers["Last-Modified"] return new_headers - def cache_response(self, request, response, body=None, - status_codes=None): + def cache_response(self, request, response, body=None, status_codes=None): """ Algorithm for caching requests. @@ -252,9 +255,7 @@ def cache_response(self, request, response, body=None, cacheable_status_codes = status_codes or self.cacheable_status_codes if response.status not in cacheable_status_codes: logger.debug( - 'Status code %s not in %s', - response.status, - cacheable_status_codes + "Status code %s not in %s", response.status, cacheable_status_codes ) return @@ -264,10 +265,12 @@ def cache_response(self, request, response, body=None, # Content-Length is valid then we can check to see if the body we've # been given matches the expected size, and if it doesn't we'll just # skip trying to cache it. - if (body is not None and - "content-length" in response_headers and - response_headers["content-length"].isdigit() and - int(response_headers["content-length"]) != len(body)): + if ( + body is not None + and "content-length" in response_headers + and response_headers["content-length"].isdigit() + and int(response_headers["content-length"]) != len(body) + ): return cc_req = self.parse_cache_control(request.headers) @@ -278,53 +281,49 @@ def cache_response(self, request, response, body=None, # Delete it from the cache if we happen to have it stored there no_store = False - if 'no-store' in cc: + if "no-store" in cc: no_store = True logger.debug('Response header has "no-store"') - if 'no-store' in cc_req: + if "no-store" in cc_req: no_store = True logger.debug('Request header has "no-store"') if no_store and self.cache.get(cache_url): logger.debug('Purging existing cache entry to honor "no-store"') self.cache.delete(cache_url) + if no_store: + return # If we've been given an etag, then keep the response - if self.cache_etags and 'etag' in response_headers: - logger.debug('Caching due to etag') + if self.cache_etags and "etag" in response_headers: + logger.debug("Caching due to etag") self.cache.set( - cache_url, - self.serializer.dumps(request, response, body=body), + cache_url, self.serializer.dumps(request, response, body=body) ) # Add to the cache any 301s. We do this before looking that # the Date headers. elif response.status == 301: - logger.debug('Caching permanant redirect') - self.cache.set( - cache_url, - self.serializer.dumps(request, response) - ) + logger.debug("Caching permanant redirect") + self.cache.set(cache_url, self.serializer.dumps(request, response)) # Add to the cache if the response headers demand it. If there # is no date header then we can't do anything about expiring # the cache. 
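
[annotation] One behavioral fix hides in the reflow above: after purging a stored entry to honor "no-store", cache_response() now returns early instead of falling through and re-caching the response via the etag/301/expires branches. The decision ladder, condensed into a hypothetical predicate; header keys are assumed lower-cased and the cache-control headers already parsed into dicts, as parse_cache_control() produces:

    def should_cache(status, request_cc, response_cc, headers, cache_etags=True):
        """Condensed decision ladder from CacheController.cache_response()."""
        if status not in (200, 203, 300, 301):
            return False
        if 'no-store' in response_cc or 'no-store' in request_cc:
            return False                  # the patch adds this early return
        if cache_etags and 'etag' in headers:
            return True                   # revalidatable later via If-None-Match
        if status == 301:
            return True                   # permanent redirects cache unconditionally
        if 'date' in headers:
            if response_cc.get('max-age', 0) > 0:
                return True               # explicit freshness lifetime
            if headers.get('expires'):
                return True               # can expire, so cache in the meantime
        return False

    print(should_cache(200, {}, {'max-age': 3600},
                       {'date': 'Tue, 04 Sep 2018 00:00:00 GMT'}))
    # True
    print(should_cache(200, {}, {'no-store': None}, {'etag': '"abc"'}))
    # False, even though an etag is present
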
- elif 'date' in response_headers: + elif "date" in response_headers: # cache when there is a max-age > 0 - if 'max-age' in cc and cc['max-age'] > 0: - logger.debug('Caching b/c date exists and max-age > 0') + if "max-age" in cc and cc["max-age"] > 0: + logger.debug("Caching b/c date exists and max-age > 0") self.cache.set( - cache_url, - self.serializer.dumps(request, response, body=body), + cache_url, self.serializer.dumps(request, response, body=body) ) # If the request can expire, it means we should cache it # in the meantime. - elif 'expires' in response_headers: - if response_headers['expires']: - logger.debug('Caching b/c of expires header') + elif "expires" in response_headers: + if response_headers["expires"]: + logger.debug("Caching b/c of expires header") self.cache.set( - cache_url, - self.serializer.dumps(request, response, body=body), + cache_url, self.serializer.dumps(request, response, body=body) ) def update_cached_response(self, request, response): @@ -336,10 +335,7 @@ def update_cached_response(self, request, response): """ cache_url = self.cache_url(request.url) - cached_response = self.serializer.loads( - request, - self.cache.get(cache_url) - ) + cached_response = self.serializer.loads(request, self.cache.get(cache_url)) if not cached_response: # we didn't have a cached response @@ -352,22 +348,20 @@ def update_cached_response(self, request, response): # the cached body invalid. But... just in case, we'll be sure # to strip out ones we know that might be problmatic due to # typical assumptions. - excluded_headers = [ - "content-length", - ] + excluded_headers = ["content-length"] cached_response.headers.update( - dict((k, v) for k, v in response.headers.items() - if k.lower() not in excluded_headers) + dict( + (k, v) + for k, v in response.headers.items() + if k.lower() not in excluded_headers + ) ) # we want a 200 b/c we have content via the cache cached_response.status = 200 # update our cache - self.cache.set( - cache_url, - self.serializer.dumps(request, cached_response), - ) + self.cache.set(cache_url, self.serializer.dumps(request, cached_response)) return cached_response diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/filewrapper.py b/pipenv/patched/notpip/_vendor/cachecontrol/filewrapper.py index f1e1ce0559..30ed4c5a62 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/filewrapper.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/filewrapper.py @@ -27,17 +27,19 @@ def __getattr__(self, name): # self.__fp hasn't been set. # # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers - fp = self.__getattribute__('_CallbackFileWrapper__fp') + fp = self.__getattribute__("_CallbackFileWrapper__fp") return getattr(fp, name) def __is_fp_closed(self): try: return self.__fp.fp is None + except AttributeError: pass try: return self.__fp.closed + except AttributeError: pass @@ -66,7 +68,7 @@ def read(self, amt=None): def _safe_read(self, amt): data = self.__fp._safe_read(amt) - if amt == 2 and data == b'\r\n': + if amt == 2 and data == b"\r\n": # urllib executes this read to toss the CRLF at the end # of the chunk. 
return data diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/heuristics.py b/pipenv/patched/notpip/_vendor/cachecontrol/heuristics.py index f182ff0266..6c0e9790d5 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/heuristics.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/heuristics.py @@ -46,7 +46,7 @@ def apply(self, response): response.headers.update(updated_headers) warning_header_value = self.warning(response) if warning_header_value is not None: - response.headers.update({'Warning': warning_header_value}) + response.headers.update({"Warning": warning_header_value}) return response @@ -56,15 +56,15 @@ class OneDayCache(BaseHeuristic): Cache the response by providing an expires 1 day in the future. """ + def update_headers(self, response): headers = {} - if 'expires' not in response.headers: - date = parsedate(response.headers['date']) - expires = expire_after(timedelta(days=1), - date=datetime(*date[:6])) - headers['expires'] = datetime_to_header(expires) - headers['cache-control'] = 'public' + if "expires" not in response.headers: + date = parsedate(response.headers["date"]) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) + headers["expires"] = datetime_to_header(expires) + headers["cache-control"] = "public" return headers @@ -78,13 +78,10 @@ def __init__(self, **kw): def update_headers(self, response): expires = expire_after(self.delta) - return { - 'expires': datetime_to_header(expires), - 'cache-control': 'public', - } + return {"expires": datetime_to_header(expires), "cache-control": "public"} def warning(self, response): - tmpl = '110 - Automatically cached for %s. Response might be stale' + tmpl = "110 - Automatically cached for %s. Response might be stale" return tmpl % self.delta @@ -100,27 +97,27 @@ class LastModified(BaseHeuristic): http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 Unlike mozilla we limit this to 24-hr. 
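
[annotation] The docstring just above carries the actual algorithm of the LastModified heuristic; the diff below it only reformats. In miniature, under the stated rule of a tenth of the Date minus Last-Modified gap, capped at 24 hours; heuristic_expires is an illustrative reduction, not the class itself:

    import calendar
    import time
    from email.utils import parsedate, parsedate_tz

    ONE_DAY = 24 * 60 * 60

    def heuristic_expires(headers):
        """Return a heuristic expiry as a Unix timestamp, or None."""
        if 'date' not in headers or 'last-modified' not in headers:
            return None
        date = parsedate_tz(headers['date'])
        last_modified = parsedate(headers['last-modified'])
        if date is None or last_modified is None:
            return None
        now = calendar.timegm(date)
        delta = now - calendar.timegm(last_modified)
        freshness_lifetime = min(ONE_DAY, delta // 10)
        if freshness_lifetime <= 0:
            return None
        return now + freshness_lifetime

    hdrs = {'date': 'Tue, 04 Sep 2018 12:00:00 GMT',
            'last-modified': 'Mon, 03 Sep 2018 12:00:00 GMT'}
    print(time.strftime('%d %H:%M:%S', time.gmtime(heuristic_expires(hdrs))))
    # 04 14:24:00, i.e. one day of staleness buys 2.4 hours of freshness
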
""" - cacheable_by_default_statuses = set([ + cacheable_by_default_statuses = { 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 - ]) + } def update_headers(self, resp): headers = resp.headers - if 'expires' in headers: + if "expires" in headers: return {} - if 'cache-control' in headers and headers['cache-control'] != 'public': + if "cache-control" in headers and headers["cache-control"] != "public": return {} if resp.status not in self.cacheable_by_default_statuses: return {} - if 'date' not in headers or 'last-modified' not in headers: + if "date" not in headers or "last-modified" not in headers: return {} - date = calendar.timegm(parsedate_tz(headers['date'])) - last_modified = parsedate(headers['last-modified']) + date = calendar.timegm(parsedate_tz(headers["date"])) + last_modified = parsedate(headers["last-modified"]) if date is None or last_modified is None: return {} @@ -132,7 +129,7 @@ def update_headers(self, resp): return {} expires = date + freshness_lifetime - return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))} + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} def warning(self, resp): return None diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py b/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py index 0f552ba89f..e7106c0294 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py @@ -48,23 +48,22 @@ def dumps(self, request, response, body=None): u"response": { u"body": body, u"headers": dict( - (text_type(k), text_type(v)) - for k, v in response.headers.items() + (text_type(k), text_type(v)) for k, v in response.headers.items() ), u"status": response.status, u"version": response.version, u"reason": text_type(response.reason), u"strict": response.strict, u"decode_content": response.decode_content, - }, + } } # Construct our vary headers data[u"vary"] = {} if u"vary" in response_headers: - varied_headers = response_headers[u'vary'].split(',') + varied_headers = response_headers[u"vary"].split(",") for header in varied_headers: - header = header.strip() + header = text_type(header).strip() header_value = request.headers.get(header, None) if header_value is not None: header_value = text_type(header_value) @@ -95,7 +94,8 @@ def loads(self, request, data): # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{0}".format(ver))(request, data) + return getattr(self, "_loads_v{}".format(ver))(request, data) + except AttributeError: # This is a version we don't have a loads function for, so we'll # just treat it as a miss and return None @@ -118,11 +118,11 @@ def prepare_response(self, request, cached): body_raw = cached["response"].pop("body") - headers = CaseInsensitiveDict(data=cached['response']['headers']) - if headers.get('transfer-encoding', '') == 'chunked': - headers.pop('transfer-encoding') + headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") - cached['response']['headers'] = headers + cached["response"]["headers"] = headers try: body = io.BytesIO(body_raw) @@ -133,13 +133,9 @@ def prepare_response(self, request, cached): # fail with: # # TypeError: 'str' does not support the buffer interface - body = io.BytesIO(body_raw.encode('utf8')) + body = io.BytesIO(body_raw.encode("utf8")) - return HTTPResponse( - body=body, - preload_content=False, - **cached["response"] - ) + return 
HTTPResponse(body=body, preload_content=False, **cached["response"]) def _loads_v0(self, request, data): # The original legacy cache data. This doesn't contain enough @@ -162,16 +158,12 @@ def _loads_v2(self, request, data): return # We need to decode the items that we've base64 encoded - cached["response"]["body"] = _b64_decode_bytes( - cached["response"]["body"] - ) + cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) cached["response"]["headers"] = dict( (_b64_decode_str(k), _b64_decode_str(v)) for k, v in cached["response"]["headers"].items() ) - cached["response"]["reason"] = _b64_decode_str( - cached["response"]["reason"], - ) + cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) cached["vary"] = dict( (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) for k, v in cached["vary"].items() @@ -187,7 +179,7 @@ def _loads_v3(self, request, data): def _loads_v4(self, request, data): try: - cached = msgpack.loads(data, encoding='utf-8') + cached = msgpack.loads(data, encoding="utf-8") except ValueError: return diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py b/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py index b50a6e27dc..265bfc8bc1 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/wrapper.py @@ -2,14 +2,16 @@ from .cache import DictCache -def CacheControl(sess, - cache=None, - cache_etags=True, - serializer=None, - heuristic=None, - controller_class=None, - adapter_class=None, - cacheable_methods=None): +def CacheControl( + sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None, + controller_class=None, + adapter_class=None, + cacheable_methods=None, +): cache = cache or DictCache() adapter_class = adapter_class or CacheControlAdapter @@ -19,9 +21,9 @@ def CacheControl(sess, serializer=serializer, heuristic=heuristic, controller_class=controller_class, - cacheable_methods=cacheable_methods + cacheable_methods=cacheable_methods, ) - sess.mount('http://', adapter) - sess.mount('https://', adapter) + sess.mount("http://", adapter) + sess.mount("https://", adapter) return sess diff --git a/pipenv/patched/notpip/_vendor/certifi/__init__.py b/pipenv/patched/notpip/_vendor/certifi/__init__.py index 556193cefb..0c4963ef60 100644 --- a/pipenv/patched/notpip/_vendor/certifi/__init__.py +++ b/pipenv/patched/notpip/_vendor/certifi/__init__.py @@ -1,3 +1,3 @@ from .core import where, old_where -__version__ = "2018.01.18" +__version__ = "2018.04.16" diff --git a/pipenv/patched/notpip/_vendor/certifi/cacert.pem b/pipenv/patched/notpip/_vendor/certifi/cacert.pem index 101ac98fa4..2713f541c4 100644 --- a/pipenv/patched/notpip/_vendor/certifi/cacert.pem +++ b/pipenv/patched/notpip/_vendor/certifi/cacert.pem @@ -3483,39 +3483,6 @@ AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ 5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su -----END CERTIFICATE----- -# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. -# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. 
-# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5" -# Serial: 156233699172481 -# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e -# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb -# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 ------BEGIN CERTIFICATE----- -MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE -BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn -aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg -QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg -SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 -MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD -VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 -dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF -bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB -IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom -/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR -Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 -4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z -5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 -hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID -AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX -SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l -VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq -URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf -peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF -Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW -+qtB4Uu2NQvAmxU= ------END CERTIFICATE----- - # Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 # Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 # Label: "Certinomis - Root CA" diff --git a/pipenv/patched/notpip/_vendor/distlib/t32.exe b/pipenv/patched/notpip/_vendor/distlib/t32.exe old mode 100755 new mode 100644 diff --git a/pipenv/patched/notpip/_vendor/distlib/t64.exe b/pipenv/patched/notpip/_vendor/distlib/t64.exe old mode 100755 new mode 100644 diff --git a/pipenv/patched/notpip/_vendor/distlib/w32.exe b/pipenv/patched/notpip/_vendor/distlib/w32.exe old mode 100755 new mode 100644 diff --git a/pipenv/patched/notpip/_vendor/distlib/w64.exe b/pipenv/patched/notpip/_vendor/distlib/w64.exe old mode 100755 new mode 100644 diff --git a/pipenv/patched/notpip/_vendor/distro.py b/pipenv/patched/notpip/_vendor/distro.py index 39bfce7979..aa4defc3bd 100644 --- a/pipenv/patched/notpip/_vendor/distro.py +++ b/pipenv/patched/notpip/_vendor/distro.py @@ -1,4 +1,4 @@ -# Copyright 2015,2016 Nir Cohen +# Copyright 2015,2016,2017 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,7 +23,7 @@ 3.5 deprecated this function, and Python 3.7 is expected to remove it altogether. Its predecessor function :py:func:`platform.dist` was already deprecated since Python 2.6 and is also expected to be removed in Python 3.7. -Still, there are many cases in which access to Linux distribution information +Still, there are many cases in which access to OS distribution information is needed. See `Python issue 1322 `_ for more information. 
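
[annotation] The distro.py hunks are a vendored upgrade whose visible theme is rewording "Linux distribution" to "OS distribution", with a uname-based fallback for the BSDs arriving further down. The public accessors these docstrings describe are unchanged; a quick usage sketch (requires the distro package installed; the outputs shown are illustrative and machine-dependent):

    import distro

    # The consolidated accessors documented in the hunks above.
    print(distro.id())                  # e.g. 'ubuntu', 'fedora', 'freebsd'
    print(distro.name(pretty=True))     # e.g. 'Ubuntu 16.04.5 LTS'
    print(distro.version(best=True))    # most precise version the sources offer
    print(distro.like())                # e.g. 'debian' on Ubuntu

    # Everything at once, as the info() docstring describes.
    print(distro.info(pretty=False, best=False))
    # e.g. {'id': 'ubuntu', 'version': '16.04', 'version_parts': {...}, ...}
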
""" @@ -94,7 +94,7 @@ def linux_distribution(full_distribution_name=True): """ - Return information about the current Linux distribution as a tuple + Return information about the current OS distribution as a tuple ``(id_name, version, codename)`` with items as follows: * ``id_name``: If *full_distribution_name* is false, the result of @@ -110,22 +110,22 @@ def linux_distribution(full_distribution_name=True): The data it returns may not exactly be the same, because it uses more data sources than the original function, and that may lead to different data if - the Linux distribution is not consistent across multiple data sources it + the OS distribution is not consistent across multiple data sources it provides (there are indeed such distributions ...). Another reason for differences is the fact that the :func:`distro.id` method normalizes the distro ID string to a reliable machine-readable value - for a number of popular Linux distributions. + for a number of popular OS distributions. """ return _distro.linux_distribution(full_distribution_name) def id(): """ - Return the distro ID of the current Linux distribution, as a + Return the distro ID of the current distribution, as a machine-readable string. - For a number of Linux distributions, the returned distro ID value is + For a number of OS distributions, the returned distro ID value is *reliable*, in the sense that it is documented and that it does not change across releases of the distribution. @@ -158,6 +158,9 @@ def id(): "scientific" Scientific Linux "slackware" Slackware "xenserver" XenServer + "openbsd" OpenBSD + "netbsd" NetBSD + "freebsd" FreeBSD ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -187,7 +190,7 @@ def id(): * a normalization of the ID is performed, based upon `normalization tables`_. The purpose of this normalization is to ensure that the ID is as reliable as possible, even across incompatible changes - in the Linux distributions. A common reason for an incompatible change is + in the OS distributions. A common reason for an incompatible change is the addition of an os-release file, or the addition of the lsb_release command, with ID values that differ from what was previously determined from the distro release file name. @@ -197,7 +200,7 @@ def id(): def name(pretty=False): """ - Return the name of the current Linux distribution, as a human-readable + Return the name of the current OS distribution, as a human-readable string. If *pretty* is false, the name is returned without version or codename. @@ -236,7 +239,7 @@ def name(pretty=False): def version(pretty=False, best=False): """ - Return the version of the current Linux distribution, as a human-readable + Return the version of the current OS distribution, as a human-readable string. If *pretty* is false, the version is returned without codename (e.g. @@ -280,7 +283,7 @@ def version(pretty=False, best=False): def version_parts(best=False): """ - Return the version of the current Linux distribution as a tuple + Return the version of the current OS distribution as a tuple ``(major, minor, build_number)`` with items as follows: * ``major``: The result of :func:`distro.major_version`. @@ -297,7 +300,7 @@ def version_parts(best=False): def major_version(best=False): """ - Return the major version of the current Linux distribution, as a string, + Return the major version of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. 
The major version is the first part of the dot-separated version string. @@ -310,7 +313,7 @@ def major_version(best=False): def minor_version(best=False): """ - Return the minor version of the current Linux distribution, as a string, + Return the minor version of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The minor version is the second part of the dot-separated version string. @@ -323,7 +326,7 @@ def minor_version(best=False): def build_number(best=False): """ - Return the build number of the current Linux distribution, as a string, + Return the build number of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The build number is the third part of the dot-separated version string. @@ -337,7 +340,7 @@ def build_number(best=False): def like(): """ Return a space-separated list of distro IDs of distributions that are - closely related to the current Linux distribution in regards to packaging + closely related to the current OS distribution in regards to packaging and programming interfaces, for example distributions the current distribution is a derivative from. @@ -353,7 +356,7 @@ def like(): def codename(): """ - Return the codename for the release of the current Linux distribution, + Return the codename for the release of the current OS distribution, as a string. If the distribution does not have a codename, an empty string is returned. @@ -377,7 +380,7 @@ def codename(): def info(pretty=False, best=False): """ - Return certain machine-readable information items about the current Linux + Return certain machine-readable information items about the current OS distribution in a dictionary, as shown in the following example: .. sourcecode:: python @@ -422,7 +425,7 @@ def info(pretty=False, best=False): def os_release_info(): """ Return a dictionary containing key-value pairs for the information items - from the os-release file data source of the current Linux distribution. + from the os-release file data source of the current OS distribution. See `os-release file`_ for details about these information items. """ @@ -432,7 +435,7 @@ def os_release_info(): def lsb_release_info(): """ Return a dictionary containing key-value pairs for the information items - from the lsb_release command data source of the current Linux distribution. + from the lsb_release command data source of the current OS distribution. See `lsb_release command output`_ for details about these information items. @@ -443,17 +446,25 @@ def lsb_release_info(): def distro_release_info(): """ Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current Linux distribution. + from the distro release file data source of the current OS distribution. See `distro release file`_ for details about these information items. """ return _distro.distro_release_info() +def uname_info(): + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + def os_release_attr(attribute): """ Return a single named information item from the os-release file data source - of the current Linux distribution. + of the current OS distribution. Parameters: @@ -472,7 +483,7 @@ def os_release_attr(attribute): def lsb_release_attr(attribute): """ Return a single named information item from the lsb_release command output - data source of the current Linux distribution. 
+ data source of the current OS distribution. Parameters: @@ -492,7 +503,7 @@ def lsb_release_attr(attribute): def distro_release_attr(attribute): """ Return a single named information item from the distro release file - data source of the current Linux distribution. + data source of the current OS distribution. Parameters: @@ -508,6 +519,23 @@ def distro_release_attr(attribute): return _distro.distro_release_attr(attribute) +def uname_attr(attribute): + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + class cached_property(object): """A version of @property which caches the value. On access, it calls the underlying function and sets the value in `__dict__` so future accesses @@ -525,13 +553,13 @@ def __get__(self, obj, owner): class LinuxDistribution(object): """ - Provides information about a Linux distribution. + Provides information about a OS distribution. This package creates a private module-global instance of this class with default initialization arguments, that is used by the `consolidated accessor functions`_ and `single source accessor functions`_. By using default initialization arguments, that module-global instance - returns data about the current Linux distribution (i.e. the distro this + returns data about the current OS distribution (i.e. the distro this package runs on). Normally, it is not necessary to create additional instances of this class. @@ -544,7 +572,8 @@ class LinuxDistribution(object): def __init__(self, include_lsb=True, os_release_file='', - distro_release_file=''): + distro_release_file='', + include_uname=True): """ The initialization method of this class gathers information from the available data sources, and stores that in private instance attributes. @@ -578,6 +607,11 @@ def __init__(self, distro release file can be found, the data source for the distro release file will be empty. + * ``include_name`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + Public instance attributes: * ``os_release_file`` (string): The path name of the @@ -591,6 +625,10 @@ def __init__(self, * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. This controls whether the lsb information will be loaded. + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. 
+ Raises: * :py:exc:`IOError`: Some I/O issue with an os-release file or distro @@ -607,6 +645,7 @@ def __init__(self, os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME) self.distro_release_file = distro_release_file or '' # updated later self.include_lsb = include_lsb + self.include_uname = include_uname def __repr__(self): """Return repr of all info @@ -616,14 +655,16 @@ def __repr__(self): "os_release_file={self.os_release_file!r}, " \ "distro_release_file={self.distro_release_file!r}, " \ "include_lsb={self.include_lsb!r}, " \ + "include_uname={self.include_uname!r}, " \ "_os_release_info={self._os_release_info!r}, " \ "_lsb_release_info={self._lsb_release_info!r}, " \ - "_distro_release_info={self._distro_release_info!r})".format( + "_distro_release_info={self._distro_release_info!r}, " \ + "_uname_info={self._uname_info!r})".format( self=self) def linux_distribution(self, full_distribution_name=True): """ - Return information about the Linux distribution that is compatible + Return information about the OS distribution that is compatible with Python's :func:`platform.linux_distribution`, supporting a subset of its parameters. @@ -636,7 +677,7 @@ def linux_distribution(self, full_distribution_name=True): ) def id(self): - """Return the distro ID of the Linux distribution, as a string. + """Return the distro ID of the OS distribution, as a string. For details, see :func:`distro.id`. """ @@ -656,22 +697,28 @@ def normalize(distro_id, table): if distro_id: return normalize(distro_id, NORMALIZED_DISTRO_ID) + distro_id = self.uname_attr('id') + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + return '' def name(self, pretty=False): """ - Return the name of the Linux distribution, as a string. + Return the name of the OS distribution, as a string. For details, see :func:`distro.name`. """ name = self.os_release_attr('name') \ or self.lsb_release_attr('distributor_id') \ - or self.distro_release_attr('name') + or self.distro_release_attr('name') \ + or self.uname_attr('name') if pretty: name = self.os_release_attr('pretty_name') \ or self.lsb_release_attr('description') if not name: - name = self.distro_release_attr('name') + name = self.distro_release_attr('name') \ + or self.uname_attr('name') version = self.version(pretty=True) if version: name = name + ' ' + version @@ -679,7 +726,7 @@ def name(self, pretty=False): def version(self, pretty=False, best=False): """ - Return the version of the Linux distribution, as a string. + Return the version of the OS distribution, as a string. For details, see :func:`distro.version`. """ @@ -690,7 +737,8 @@ def version(self, pretty=False, best=False): self._parse_distro_release_content( self.os_release_attr('pretty_name')).get('version_id', ''), self._parse_distro_release_content( - self.lsb_release_attr('description')).get('version_id', '') + self.lsb_release_attr('description')).get('version_id', ''), + self.uname_attr('release') ] version = '' if best: @@ -712,7 +760,7 @@ def version(self, pretty=False, best=False): def version_parts(self, best=False): """ - Return the version of the Linux distribution, as a tuple of version + Return the version of the OS distribution, as a tuple of version numbers. For details, see :func:`distro.version_parts`. @@ -736,7 +784,7 @@ def major_version(self, best=False): def minor_version(self, best=False): """ - Return the minor version number of the Linux distribution. + Return the minor version number of the current distribution. For details, see :func:`distro.minor_version`. 
""" @@ -744,7 +792,7 @@ def minor_version(self, best=False): def build_number(self, best=False): """ - Return the build number of the Linux distribution. + Return the build number of the current distribution. For details, see :func:`distro.build_number`. """ @@ -752,7 +800,7 @@ def build_number(self, best=False): def like(self): """ - Return the IDs of distributions that are like the Linux distribution. + Return the IDs of distributions that are like the OS distribution. For details, see :func:`distro.like`. """ @@ -760,7 +808,7 @@ def like(self): def codename(self): """ - Return the codename of the Linux distribution. + Return the codename of the OS distribution. For details, see :func:`distro.codename`. """ @@ -771,7 +819,7 @@ def codename(self): def info(self, pretty=False, best=False): """ - Return certain machine-readable information about the Linux + Return certain machine-readable information about the OS distribution. For details, see :func:`distro.info`. @@ -791,7 +839,7 @@ def info(self, pretty=False, best=False): def os_release_info(self): """ Return a dictionary containing key-value pairs for the information - items from the os-release file data source of the Linux distribution. + items from the os-release file data source of the OS distribution. For details, see :func:`distro.os_release_info`. """ @@ -800,7 +848,7 @@ def os_release_info(self): def lsb_release_info(self): """ Return a dictionary containing key-value pairs for the information - items from the lsb_release command data source of the Linux + items from the lsb_release command data source of the OS distribution. For details, see :func:`distro.lsb_release_info`. @@ -810,17 +858,25 @@ def lsb_release_info(self): def distro_release_info(self): """ Return a dictionary containing key-value pairs for the information - items from the distro release file data source of the Linux + items from the distro release file data source of the OS distribution. For details, see :func:`distro.distro_release_info`. """ return self._distro_release_info + def uname_info(self): + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + def os_release_attr(self, attribute): """ Return a single named information item from the os-release file data - source of the Linux distribution. + source of the OS distribution. For details, see :func:`distro.os_release_attr`. """ @@ -829,7 +885,7 @@ def os_release_attr(self, attribute): def lsb_release_attr(self, attribute): """ Return a single named information item from the lsb_release command - output data source of the Linux distribution. + output data source of the OS distribution. For details, see :func:`distro.lsb_release_attr`. """ @@ -838,12 +894,21 @@ def lsb_release_attr(self, attribute): def distro_release_attr(self, attribute): """ Return a single named information item from the distro release file - data source of the Linux distribution. + data source of the OS distribution. For details, see :func:`distro.distro_release_attr`. """ return self._distro_release_info.get(attribute, '') + def uname_attr(self, attribute): + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_release_attr`. 
+ """ + return self._uname_info.get(attribute, '') + @cached_property def _os_release_info(self): """ @@ -960,6 +1025,34 @@ def _parse_lsb_release_content(lines): props.update({k.replace(' ', '_').lower(): v.strip()}) return props + @cached_property + def _uname_info(self): + with open(os.devnull, 'w') as devnull: + try: + cmd = ('uname', '-rs') + stdout = subprocess.check_output(cmd, stderr=devnull) + except OSError: + return {} + content = stdout.decode(sys.getfilesystemencoding()).splitlines() + return self._parse_uname_content(content) + + @staticmethod + def _parse_uname_content(lines): + props = {} + match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. + if name == 'Linux': + return {} + props['id'] = name.lower() + props['name'] = name + props['release'] = version + return props + @cached_property def _distro_release_info(self): """ @@ -1082,7 +1175,7 @@ def main(): logger.setLevel(logging.DEBUG) logger.addHandler(logging.StreamHandler(sys.stdout)) - parser = argparse.ArgumentParser(description="Linux distro info tool") + parser = argparse.ArgumentParser(description="OS distro info tool") parser.add_argument( '--json', '-j', diff --git a/pipenv/patched/notpip/_vendor/idna/LICENSE.rst b/pipenv/patched/notpip/_vendor/idna/LICENSE.rst index 9d38815ebd..3ee64fba29 100644 --- a/pipenv/patched/notpip/_vendor/idna/LICENSE.rst +++ b/pipenv/patched/notpip/_vendor/idna/LICENSE.rst @@ -1,7 +1,7 @@ License ------- -Copyright (c) 2013-2017, Kim Davies. All rights reserved. +Copyright (c) 2013-2018, Kim Davies. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/pipenv/patched/notpip/_vendor/idna/core.py b/pipenv/patched/notpip/_vendor/idna/core.py index b55b664568..090c2c18d5 100644 --- a/pipenv/patched/notpip/_vendor/idna/core.py +++ b/pipenv/patched/notpip/_vendor/idna/core.py @@ -34,7 +34,11 @@ class InvalidCodepointContext(IDNAError): def _combining_class(cp): - return unicodedata.combining(unichr(cp)) + v = unicodedata.combining(unichr(cp)) + if v == 0: + if not unicodedata.name(unichr(cp)): + raise ValueError("Unknown character in unicodedata") + return v def _is_script(cp, script): return intranges_contain(ord(cp), idnadata.scripts[script]) @@ -71,7 +75,6 @@ def check_bidi(label, check_ltr=False): raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) if direction in ['R', 'AL', 'AN']: bidi_label = True - break if not bidi_label and not check_ltr: return True @@ -244,8 +247,13 @@ def check_label(label): if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): continue elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): - if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): if not valid_contexto(label, 
diff --git a/pipenv/patched/notpip/_vendor/idna/idnadata.py b/pipenv/patched/notpip/_vendor/idna/idnadata.py
index c48f1b5047..17974e2337 100644
--- a/pipenv/patched/notpip/_vendor/idna/idnadata.py
+++ b/pipenv/patched/notpip/_vendor/idna/idnadata.py
@@ -1,11 +1,12 @@
 # This file is automatically generated by tools/idna-data

-__version__ = "6.3.0"
+__version__ = "10.0.0"
 scripts = {
     'Greek': (
         0x37000000374,
         0x37500000378,
         0x37a0000037e,
+        0x37f00000380,
         0x38400000385,
         0x38600000387,
         0x3880000038b,
@@ -34,7 +35,9 @@
         0x1ff200001ff5,
         0x1ff600001fff,
         0x212600002127,
-        0x101400001018b,
+        0xab650000ab66,
+        0x101400001018f,
+        0x101a0000101a1,
         0x1d2000001d246,
     ),
     'Han': (
@@ -46,12 +49,14 @@
         0x30210000302a,
         0x30380000303c,
         0x340000004db6,
-        0x4e0000009fcd,
+        0x4e0000009feb,
         0xf9000000fa6e,
         0xfa700000fada,
         0x200000002a6d7,
         0x2a7000002b735,
         0x2b7400002b81e,
+        0x2b8200002cea2,
+        0x2ceb00002ebe1,
         0x2f8000002fa1e,
     ),
     'Hebrew': (
@@ -68,7 +73,7 @@
     'Hiragana': (
         0x304100003097,
         0x309d000030a0,
-        0x1b0010001b002,
+        0x1b0010001b11f,
         0x1f2000001f201,
     ),
     'Katakana': (
@@ -88,6 +93,7 @@
     0x602: 85,
     0x603: 85,
     0x604: 85,
+    0x605: 85,
     0x608: 85,
     0x60b: 85,
     0x620: 68,
@@ -365,7 +371,7 @@
     0x844: 68,
     0x845: 68,
     0x846: 82,
-    0x847: 68,
+    0x847: 82,
     0x848: 68,
     0x849: 82,
     0x84a: 68,
@@ -373,7 +379,7 @@
     0x84c: 68,
     0x84d: 68,
     0x84e: 68,
-    0x84f: 82,
+    0x84f: 68,
     0x850: 68,
     0x851: 68,
     0x852: 68,
@@ -383,7 +389,19 @@
     0x856: 85,
     0x857: 85,
     0x858: 85,
+    0x860: 68,
+    0x861: 85,
+    0x862: 68,
+    0x863: 68,
+    0x864: 68,
+    0x865: 68,
+    0x866: 85,
+    0x867: 82,
+    0x868: 68,
+    0x869: 82,
+    0x86a: 82,
     0x8a0: 68,
+    0x8a1: 68,
     0x8a2: 68,
     0x8a3: 68,
     0x8a4: 68,
@@ -395,6 +413,23 @@
     0x8aa: 82,
     0x8ab: 82,
     0x8ac: 82,
+    0x8ad: 85,
+    0x8ae: 82,
+    0x8af: 68,
+    0x8b0: 68,
+    0x8b1: 82,
+    0x8b2: 82,
+    0x8b3: 68,
+
0x8b4: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8e2: 85, 0x1806: 85, 0x1807: 68, 0x180a: 67, @@ -492,8 +527,8 @@ 0x1882: 85, 0x1883: 85, 0x1884: 85, - 0x1885: 85, - 0x1886: 85, + 0x1885: 84, + 0x1886: 84, 0x1887: 68, 0x1888: 68, 0x1889: 68, @@ -531,6 +566,7 @@ 0x18aa: 68, 0x200c: 85, 0x200d: 67, + 0x202f: 85, 0x2066: 85, 0x2067: 85, 0x2068: 85, @@ -587,6 +623,141 @@ 0xa871: 68, 0xa872: 76, 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, } codepoint_classes = { 'PVALID': ( @@ -858,6 +1029,10 @@ 0x52300000524, 0x52500000526, 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, 0x5590000055a, 0x56100000587, 0x591000005be, @@ -881,15 +1056,14 @@ 0x7c0000007f6, 0x8000000082e, 0x8400000085c, - 0x8a0000008a1, - 0x8a2000008ad, - 0x8e4000008ff, - 0x90000000958, + 0x8600000086b, + 0x8a0000008b5, + 0x8b6000008be, + 0x8d4000008e2, + 0x8e300000958, 0x96000000964, 0x96600000970, - 0x97100000978, - 0x97900000980, - 0x98100000984, + 0x97100000984, 0x9850000098d, 0x98f00000991, 0x993000009a9, @@ -902,6 +1076,7 @@ 0x9d7000009d8, 0x9e0000009e4, 0x9e6000009f2, + 0x9fc000009fd, 0xa0100000a04, 0xa0500000a0b, 0xa0f00000a11, @@ -930,6 +1105,7 @@ 0xad000000ad1, 0xae000000ae4, 0xae600000af0, + 0xaf900000b00, 0xb0100000b04, 0xb0500000b0d, 0xb0f00000b11, @@ -960,20 +1136,19 @@ 0xbd000000bd1, 0xbd700000bd8, 0xbe600000bf0, - 0xc0100000c04, + 0xc0000000c04, 0xc0500000c0d, 0xc0e00000c11, 0xc1200000c29, - 0xc2a00000c34, - 0xc3500000c3a, + 0xc2a00000c3a, 0xc3d00000c45, 
0xc4600000c49, 0xc4a00000c4e, 0xc5500000c57, - 0xc5800000c5a, + 0xc5800000c5b, 0xc6000000c64, 0xc6600000c70, - 0xc8200000c84, + 0xc8000000c84, 0xc8500000c8d, 0xc8e00000c91, 0xc9200000ca9, @@ -987,15 +1162,14 @@ 0xce000000ce4, 0xce600000cf0, 0xcf100000cf3, - 0xd0200000d04, + 0xd0000000d04, 0xd0500000d0d, 0xd0e00000d11, - 0xd1200000d3b, - 0xd3d00000d45, + 0xd1200000d45, 0xd4600000d49, 0xd4a00000d4f, - 0xd5700000d58, - 0xd6000000d64, + 0xd5400000d58, + 0xd5f00000d64, 0xd6600000d70, 0xd7a00000d80, 0xd8200000d84, @@ -1008,6 +1182,7 @@ 0xdcf00000dd5, 0xdd600000dd7, 0xdd800000de0, + 0xde600000df0, 0xdf200000df4, 0xe0100000e33, 0xe3400000e3b, @@ -1082,11 +1257,12 @@ 0x13180000135b, 0x135d00001360, 0x138000001390, - 0x13a0000013f5, + 0x13a0000013f6, 0x14010000166d, 0x166f00001680, 0x16810000169b, 0x16a0000016eb, + 0x16f1000016f9, 0x17000000170d, 0x170e00001715, 0x172000001735, @@ -1103,7 +1279,7 @@ 0x182000001878, 0x1880000018ab, 0x18b0000018f6, - 0x19000000191d, + 0x19000000191f, 0x19200000192c, 0x19300000193c, 0x19460000196e, @@ -1117,6 +1293,7 @@ 0x1a7f00001a8a, 0x1a9000001a9a, 0x1aa700001aa8, + 0x1ab000001abe, 0x1b0000001b4c, 0x1b5000001b5a, 0x1b6b00001b74, @@ -1125,15 +1302,15 @@ 0x1c4000001c4a, 0x1c4d00001c7e, 0x1cd000001cd3, - 0x1cd400001cf7, + 0x1cd400001cfa, 0x1d0000001d2c, 0x1d2f00001d30, 0x1d3b00001d3c, 0x1d4e00001d4f, 0x1d6b00001d78, 0x1d7900001d9b, - 0x1dc000001de7, - 0x1dfc00001e00, + 0x1dc000001dfa, + 0x1dfb00001e00, 0x1e0100001e02, 0x1e0300001e04, 0x1e0500001e06, @@ -1367,11 +1544,11 @@ 0x309d0000309f, 0x30a1000030fb, 0x30fc000030ff, - 0x31050000312e, + 0x31050000312f, 0x31a0000031bb, 0x31f000003200, 0x340000004db6, - 0x4e0000009fcd, + 0x4e0000009feb, 0xa0000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, @@ -1413,7 +1590,9 @@ 0xa6930000a694, 0xa6950000a696, 0xa6970000a698, - 0xa69f0000a6e6, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, 0xa6f00000a6f2, 0xa7170000a720, 0xa7230000a724, @@ -1463,30 +1642,39 @@ 0xa7850000a786, 0xa7870000a789, 0xa78c0000a78d, - 0xa78e0000a78f, + 0xa78e0000a790, 0xa7910000a792, - 0xa7930000a794, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, 0xa7a10000a7a2, 0xa7a30000a7a4, 0xa7a50000a7a6, 0xa7a70000a7a8, 0xa7a90000a7aa, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7f70000a7f8, 0xa7fa0000a828, 0xa8400000a874, - 0xa8800000a8c5, + 0xa8800000a8c6, 0xa8d00000a8da, 0xa8e00000a8f8, 0xa8fb0000a8fc, + 0xa8fd0000a8fe, 0xa9000000a92e, 0xa9300000a954, 0xa9800000a9c1, 0xa9cf0000a9da, + 0xa9e00000a9ff, 0xaa000000aa37, 0xaa400000aa4e, 0xaa500000aa5a, 0xaa600000aa77, - 0xaa7a0000aa7c, - 0xaa800000aac3, + 0xaa7a0000aac3, 0xaadb0000aade, 0xaae00000aaf0, 0xaaf20000aaf7, @@ -1495,6 +1683,8 @@ 0xab110000ab17, 0xab200000ab27, 0xab280000ab2f, + 0xab300000ab5b, + 0xab600000ab66, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1507,7 +1697,7 @@ 0xfa230000fa25, 0xfa270000fa2a, 0xfb1e0000fb1f, - 0xfe200000fe27, + 0xfe200000fe30, 0xfe730000fe74, 0x100000001000c, 0x1000d00010027, @@ -1519,20 +1709,32 @@ 0x101fd000101fe, 0x102800001029d, 0x102a0000102d1, - 0x103000001031f, - 0x1033000010341, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, 0x103420001034a, + 0x103500001037b, 0x103800001039e, 0x103a0000103c4, 0x103c8000103d0, 0x104280001049e, 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, 0x1080000010806, 0x1080800010809, 0x1080a00010836, 0x1083700010839, 0x1083c0001083d, 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 
0x108e0000108f3, + 0x108f4000108f6, 0x1090000010916, 0x109200001093a, 0x10980000109b8, @@ -1545,31 +1747,137 @@ 0x10a3800010a3b, 0x10a3f00010a40, 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, 0x10b0000010b36, 0x10b4000010b56, 0x10b6000010b73, + 0x10b8000010b92, 0x10c0000010c49, + 0x10cc000010cf3, 0x1100000011047, 0x1106600011070, - 0x11080000110bb, + 0x1107f000110bb, 0x110d0000110e9, 0x110f0000110fa, 0x1110000011135, 0x1113600011140, + 0x1115000011174, + 0x1117600011177, 0x11180000111c5, - 0x111d0000111da, + 0x111ca000111cd, + 0x111d0000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133c00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, 0x11680000116b8, 0x116c0000116ca, - 0x120000001236f, + 0x117000001171a, + 0x1171d0001172c, + 0x117300001173a, + 0x118c0000118ea, + 0x118ff00011900, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a84, + 0x11a8600011a9a, + 0x11ac000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x120000001239a, + 0x1248000012544, 0x130000001342f, + 0x1440000014647, 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16ad000016aee, + 0x16af000016af5, + 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, 0x16f0000016f45, 0x16f5000016f7f, 0x16f8f00016fa0, - 0x1b0000001b002, + 0x16fe000016fe2, + 0x17000000187ed, + 0x1880000018af3, + 0x1b0000001b11f, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94b, + 0x1e9500001e95a, 0x200000002a6d7, 0x2a7000002b735, 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/pipenv/patched/notpip/_vendor/idna/package_data.py b/pipenv/patched/notpip/_vendor/idna/package_data.py index fc3313927b..39c192bae6 100644 --- a/pipenv/patched/notpip/_vendor/idna/package_data.py +++ b/pipenv/patched/notpip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '2.6' +__version__ = '2.7' diff --git a/pipenv/patched/notpip/_vendor/idna/uts46data.py b/pipenv/patched/notpip/_vendor/idna/uts46data.py index f9b3236f4a..79731cb9e7 100644 --- a/pipenv/patched/notpip/_vendor/idna/uts46data.py +++ b/pipenv/patched/notpip/_vendor/idna/uts46data.py @@ -4,7 +4,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = "6.3.0" +__version__ = "10.0.0" def _seg_0(): return [ (0x0, '3'), @@ -635,7 +635,8 @@ def _seg_6(): (0x37A, '3', u' ι'), (0x37B, 'V'), (0x37E, '3', u';'), - (0x37F, 'X'), + (0x37F, 'M', 
u'ϳ'), + (0x380, 'X'), (0x384, '3', u' ́'), (0x385, '3', u' ̈́'), (0x386, 'M', u'ά'), @@ -730,11 +731,11 @@ def _seg_6(): (0x400, 'M', u'ѐ'), (0x401, 'M', u'ё'), (0x402, 'M', u'ђ'), - (0x403, 'M', u'ѓ'), ] def _seg_7(): return [ + (0x403, 'M', u'ѓ'), (0x404, 'M', u'є'), (0x405, 'M', u'ѕ'), (0x406, 'M', u'і'), @@ -834,11 +835,11 @@ def _seg_7(): (0x49B, 'V'), (0x49C, 'M', u'ҝ'), (0x49D, 'V'), - (0x49E, 'M', u'ҟ'), ] def _seg_8(): return [ + (0x49E, 'M', u'ҟ'), (0x49F, 'V'), (0x4A0, 'M', u'ҡ'), (0x4A1, 'V'), @@ -938,11 +939,11 @@ def _seg_8(): (0x500, 'M', u'ԁ'), (0x501, 'V'), (0x502, 'M', u'ԃ'), - (0x503, 'V'), ] def _seg_9(): return [ + (0x503, 'V'), (0x504, 'M', u'ԅ'), (0x505, 'V'), (0x506, 'M', u'ԇ'), @@ -979,7 +980,15 @@ def _seg_9(): (0x525, 'V'), (0x526, 'M', u'ԧ'), (0x527, 'V'), - (0x528, 'X'), + (0x528, 'M', u'ԩ'), + (0x529, 'V'), + (0x52A, 'M', u'ԫ'), + (0x52B, 'V'), + (0x52C, 'M', u'ԭ'), + (0x52D, 'V'), + (0x52E, 'M', u'ԯ'), + (0x52F, 'V'), + (0x530, 'X'), (0x531, 'M', u'ա'), (0x532, 'M', u'բ'), (0x533, 'M', u'գ'), @@ -1026,7 +1035,7 @@ def _seg_9(): (0x588, 'X'), (0x589, 'V'), (0x58B, 'X'), - (0x58F, 'V'), + (0x58D, 'V'), (0x590, 'X'), (0x591, 'V'), (0x5C8, 'X'), @@ -1034,6 +1043,10 @@ def _seg_9(): (0x5EB, 'X'), (0x5F0, 'V'), (0x5F5, 'X'), + ] + +def _seg_10(): + return [ (0x606, 'V'), (0x61C, 'X'), (0x61E, 'V'), @@ -1043,10 +1056,6 @@ def _seg_9(): (0x678, 'M', u'يٴ'), (0x679, 'V'), (0x6DD, 'X'), - ] - -def _seg_10(): - return [ (0x6DE, 'V'), (0x70E, 'X'), (0x710, 'V'), @@ -1063,13 +1072,15 @@ def _seg_10(): (0x85C, 'X'), (0x85E, 'V'), (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), (0x8A0, 'V'), - (0x8A1, 'X'), - (0x8A2, 'V'), - (0x8AD, 'X'), - (0x8E4, 'V'), - (0x8FF, 'X'), - (0x900, 'V'), + (0x8B5, 'X'), + (0x8B6, 'V'), + (0x8BE, 'X'), + (0x8D4, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), (0x958, 'M', u'क़'), (0x959, 'M', u'ख़'), (0x95A, 'M', u'ग़'), @@ -1079,10 +1090,6 @@ def _seg_10(): (0x95E, 'M', u'फ़'), (0x95F, 'M', u'य़'), (0x960, 'V'), - (0x978, 'X'), - (0x979, 'V'), - (0x980, 'X'), - (0x981, 'V'), (0x984, 'X'), (0x985, 'V'), (0x98D, 'X'), @@ -1111,7 +1118,7 @@ def _seg_10(): (0x9E0, 'V'), (0x9E4, 'X'), (0x9E6, 'V'), - (0x9FC, 'X'), + (0x9FE, 'X'), (0xA01, 'V'), (0xA04, 'X'), (0xA05, 'V'), @@ -1140,6 +1147,10 @@ def _seg_10(): (0xA4E, 'X'), (0xA51, 'V'), (0xA52, 'X'), + ] + +def _seg_11(): + return [ (0xA59, 'M', u'ਖ਼'), (0xA5A, 'M', u'ਗ਼'), (0xA5B, 'M', u'ਜ਼'), @@ -1147,10 +1158,6 @@ def _seg_10(): (0xA5D, 'X'), (0xA5E, 'M', u'ਫ਼'), (0xA5F, 'X'), - ] - -def _seg_11(): - return [ (0xA66, 'V'), (0xA76, 'X'), (0xA81, 'V'), @@ -1179,6 +1186,8 @@ def _seg_11(): (0xAE4, 'X'), (0xAE6, 'V'), (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), (0xB01, 'V'), (0xB04, 'X'), (0xB05, 'V'), @@ -1240,8 +1249,12 @@ def _seg_11(): (0xBD8, 'X'), (0xBE6, 'V'), (0xBFB, 'X'), - (0xC01, 'V'), + (0xC00, 'V'), (0xC04, 'X'), + ] + +def _seg_12(): + return [ (0xC05, 'V'), (0xC0D, 'X'), (0xC0E, 'V'), @@ -1249,12 +1262,6 @@ def _seg_11(): (0xC12, 'V'), (0xC29, 'X'), (0xC2A, 'V'), - (0xC34, 'X'), - (0xC35, 'V'), - ] - -def _seg_12(): - return [ (0xC3A, 'X'), (0xC3D, 'V'), (0xC45, 'X'), @@ -1265,14 +1272,12 @@ def _seg_12(): (0xC55, 'V'), (0xC57, 'X'), (0xC58, 'V'), - (0xC5A, 'X'), + (0xC5B, 'X'), (0xC60, 'V'), (0xC64, 'X'), (0xC66, 'V'), (0xC70, 'X'), (0xC78, 'V'), - (0xC80, 'X'), - (0xC82, 'V'), (0xC84, 'X'), (0xC85, 'V'), (0xC8D, 'X'), @@ -1300,27 +1305,21 @@ def _seg_12(): (0xCF0, 'X'), (0xCF1, 'V'), (0xCF3, 'X'), - (0xD02, 'V'), + (0xD00, 'V'), (0xD04, 'X'), (0xD05, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), (0xD11, 
'X'), (0xD12, 'V'), - (0xD3B, 'X'), - (0xD3D, 'V'), (0xD45, 'X'), (0xD46, 'V'), (0xD49, 'X'), (0xD4A, 'V'), - (0xD4F, 'X'), - (0xD57, 'V'), - (0xD58, 'X'), - (0xD60, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), (0xD64, 'X'), (0xD66, 'V'), - (0xD76, 'X'), - (0xD79, 'V'), (0xD80, 'X'), (0xD82, 'V'), (0xD84, 'X'), @@ -1342,6 +1341,8 @@ def _seg_12(): (0xDD7, 'X'), (0xDD8, 'V'), (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), (0xDF2, 'V'), (0xDF5, 'X'), (0xE01, 'V'), @@ -1354,11 +1355,11 @@ def _seg_12(): (0xE83, 'X'), (0xE84, 'V'), (0xE85, 'X'), - (0xE87, 'V'), ] def _seg_13(): return [ + (0xE87, 'V'), (0xE89, 'X'), (0xE8A, 'V'), (0xE8B, 'X'), @@ -1458,11 +1459,11 @@ def _seg_13(): (0x124E, 'X'), (0x1250, 'V'), (0x1257, 'X'), - (0x1258, 'V'), ] def _seg_14(): return [ + (0x1258, 'V'), (0x1259, 'X'), (0x125A, 'V'), (0x125E, 'X'), @@ -1493,13 +1494,20 @@ def _seg_14(): (0x1380, 'V'), (0x139A, 'X'), (0x13A0, 'V'), - (0x13F5, 'X'), + (0x13F6, 'X'), + (0x13F8, 'M', u'Ᏸ'), + (0x13F9, 'M', u'Ᏹ'), + (0x13FA, 'M', u'Ᏺ'), + (0x13FB, 'M', u'Ᏻ'), + (0x13FC, 'M', u'Ᏼ'), + (0x13FD, 'M', u'Ᏽ'), + (0x13FE, 'X'), (0x1400, 'V'), (0x1680, 'X'), (0x1681, 'V'), (0x169D, 'X'), (0x16A0, 'V'), - (0x16F1, 'X'), + (0x16F9, 'X'), (0x1700, 'V'), (0x170D, 'X'), (0x170E, 'V'), @@ -1536,7 +1544,7 @@ def _seg_14(): (0x18B0, 'V'), (0x18F6, 'X'), (0x1900, 'V'), - (0x191D, 'X'), + (0x191F, 'X'), (0x1920, 'V'), (0x192C, 'X'), (0x1930, 'V'), @@ -1555,6 +1563,10 @@ def _seg_14(): (0x19DB, 'X'), (0x19DE, 'V'), (0x1A1C, 'X'), + ] + +def _seg_15(): + return [ (0x1A1E, 'V'), (0x1A5F, 'X'), (0x1A60, 'V'), @@ -1563,12 +1575,10 @@ def _seg_14(): (0x1A8A, 'X'), (0x1A90, 'V'), (0x1A9A, 'X'), - ] - -def _seg_15(): - return [ (0x1AA0, 'V'), (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ABF, 'X'), (0x1B00, 'V'), (0x1B4C, 'X'), (0x1B50, 'V'), @@ -1580,11 +1590,19 @@ def _seg_15(): (0x1C3B, 'V'), (0x1C4A, 'X'), (0x1C4D, 'V'), - (0x1C80, 'X'), + (0x1C80, 'M', u'в'), + (0x1C81, 'M', u'д'), + (0x1C82, 'M', u'о'), + (0x1C83, 'M', u'с'), + (0x1C84, 'M', u'т'), + (0x1C86, 'M', u'ъ'), + (0x1C87, 'M', u'ѣ'), + (0x1C88, 'M', u'ꙋ'), + (0x1C89, 'X'), (0x1CC0, 'V'), (0x1CC8, 'X'), (0x1CD0, 'V'), - (0x1CF7, 'X'), + (0x1CFA, 'X'), (0x1D00, 'V'), (0x1D2C, 'M', u'a'), (0x1D2D, 'M', u'æ'), @@ -1649,6 +1667,10 @@ def _seg_15(): (0x1D68, 'M', u'ρ'), (0x1D69, 'M', u'φ'), (0x1D6A, 'M', u'χ'), + ] + +def _seg_16(): + return [ (0x1D6B, 'V'), (0x1D78, 'M', u'н'), (0x1D79, 'V'), @@ -1667,10 +1689,6 @@ def _seg_15(): (0x1DA7, 'M', u'ᵻ'), (0x1DA8, 'M', u'ʝ'), (0x1DA9, 'M', u'ɭ'), - ] - -def _seg_16(): - return [ (0x1DAA, 'M', u'ᶅ'), (0x1DAB, 'M', u'ʟ'), (0x1DAC, 'M', u'ɱ'), @@ -1694,8 +1712,8 @@ def _seg_16(): (0x1DBE, 'M', u'ʒ'), (0x1DBF, 'M', u'θ'), (0x1DC0, 'V'), - (0x1DE7, 'X'), - (0x1DFC, 'V'), + (0x1DFA, 'X'), + (0x1DFB, 'V'), (0x1E00, 'M', u'ḁ'), (0x1E01, 'V'), (0x1E02, 'M', u'ḃ'), @@ -1753,6 +1771,10 @@ def _seg_16(): (0x1E36, 'M', u'ḷ'), (0x1E37, 'V'), (0x1E38, 'M', u'ḹ'), + ] + +def _seg_17(): + return [ (0x1E39, 'V'), (0x1E3A, 'M', u'ḻ'), (0x1E3B, 'V'), @@ -1771,10 +1793,6 @@ def _seg_16(): (0x1E48, 'M', u'ṉ'), (0x1E49, 'V'), (0x1E4A, 'M', u'ṋ'), - ] - -def _seg_17(): - return [ (0x1E4B, 'V'), (0x1E4C, 'M', u'ṍ'), (0x1E4D, 'V'), @@ -1857,6 +1875,10 @@ def _seg_17(): (0x1E9F, 'V'), (0x1EA0, 'M', u'ạ'), (0x1EA1, 'V'), + ] + +def _seg_18(): + return [ (0x1EA2, 'M', u'ả'), (0x1EA3, 'V'), (0x1EA4, 'M', u'ấ'), @@ -1875,10 +1897,6 @@ def _seg_17(): (0x1EB1, 'V'), (0x1EB2, 'M', u'ẳ'), (0x1EB3, 'V'), - ] - -def _seg_18(): - return [ (0x1EB4, 'M', u'ẵ'), (0x1EB5, 'V'), (0x1EB6, 'M', 
u'ặ'), @@ -1961,6 +1979,10 @@ def _seg_18(): (0x1F0B, 'M', u'ἃ'), (0x1F0C, 'M', u'ἄ'), (0x1F0D, 'M', u'ἅ'), + ] + +def _seg_19(): + return [ (0x1F0E, 'M', u'ἆ'), (0x1F0F, 'M', u'ἇ'), (0x1F10, 'V'), @@ -1979,10 +2001,6 @@ def _seg_18(): (0x1F2B, 'M', u'ἣ'), (0x1F2C, 'M', u'ἤ'), (0x1F2D, 'M', u'ἥ'), - ] - -def _seg_19(): - return [ (0x1F2E, 'M', u'ἦ'), (0x1F2F, 'M', u'ἧ'), (0x1F30, 'V'), @@ -2065,6 +2083,10 @@ def _seg_19(): (0x1F9A, 'M', u'ἢι'), (0x1F9B, 'M', u'ἣι'), (0x1F9C, 'M', u'ἤι'), + ] + +def _seg_20(): + return [ (0x1F9D, 'M', u'ἥι'), (0x1F9E, 'M', u'ἦι'), (0x1F9F, 'M', u'ἧι'), @@ -2083,10 +2105,6 @@ def _seg_19(): (0x1FAC, 'M', u'ὤι'), (0x1FAD, 'M', u'ὥι'), (0x1FAE, 'M', u'ὦι'), - ] - -def _seg_20(): - return [ (0x1FAF, 'M', u'ὧι'), (0x1FB0, 'V'), (0x1FB2, 'M', u'ὰι'), @@ -2169,6 +2187,10 @@ def _seg_20(): (0x2024, 'X'), (0x2027, 'V'), (0x2028, 'X'), + ] + +def _seg_21(): + return [ (0x202F, '3', u' '), (0x2030, 'V'), (0x2033, 'M', u'′′'), @@ -2187,10 +2209,6 @@ def _seg_20(): (0x204A, 'V'), (0x2057, 'M', u'′′′′'), (0x2058, 'V'), - ] - -def _seg_21(): - return [ (0x205F, '3', u' '), (0x2060, 'I'), (0x2061, 'X'), @@ -2244,7 +2262,7 @@ def _seg_21(): (0x20A0, 'V'), (0x20A8, 'M', u'rs'), (0x20A9, 'V'), - (0x20BB, 'X'), + (0x20C0, 'X'), (0x20D0, 'V'), (0x20F1, 'X'), (0x2100, '3', u'a/c'), @@ -2273,6 +2291,10 @@ def _seg_21(): (0x2120, 'M', u'sm'), (0x2121, 'M', u'tel'), (0x2122, 'M', u'tm'), + ] + +def _seg_22(): + return [ (0x2123, 'V'), (0x2124, 'M', u'z'), (0x2125, 'V'), @@ -2291,10 +2313,6 @@ def _seg_21(): (0x2133, 'M', u'm'), (0x2134, 'M', u'o'), (0x2135, 'M', u'א'), - ] - -def _seg_22(): - return [ (0x2136, 'M', u'ב'), (0x2137, 'M', u'ג'), (0x2138, 'M', u'ד'), @@ -2363,7 +2381,8 @@ def _seg_22(): (0x2183, 'X'), (0x2184, 'V'), (0x2189, 'M', u'0⁄3'), - (0x218A, 'X'), + (0x218A, 'V'), + (0x218C, 'X'), (0x2190, 'V'), (0x222C, 'M', u'∫∫'), (0x222D, 'M', u'∫∫∫'), @@ -2376,10 +2395,12 @@ def _seg_22(): (0x226E, '3'), (0x2270, 'V'), (0x2329, 'M', u'〈'), + ] + +def _seg_23(): + return [ (0x232A, 'M', u'〉'), (0x232B, 'V'), - (0x23F4, 'X'), - (0x2400, 'V'), (0x2427, 'X'), (0x2440, 'V'), (0x244B, 'X'), @@ -2395,10 +2416,6 @@ def _seg_22(): (0x2469, 'M', u'10'), (0x246A, 'M', u'11'), (0x246B, 'M', u'12'), - ] - -def _seg_23(): - return [ (0x246C, 'M', u'13'), (0x246D, 'M', u'14'), (0x246E, 'M', u'15'), @@ -2482,6 +2499,10 @@ def _seg_23(): (0x24CF, 'M', u'z'), (0x24D0, 'M', u'a'), (0x24D1, 'M', u'b'), + ] + +def _seg_24(): + return [ (0x24D2, 'M', u'c'), (0x24D3, 'M', u'd'), (0x24D4, 'M', u'e'), @@ -2499,10 +2520,6 @@ def _seg_23(): (0x24E0, 'M', u'q'), (0x24E1, 'M', u'r'), (0x24E2, 'M', u's'), - ] - -def _seg_24(): - return [ (0x24E3, 'M', u't'), (0x24E4, 'M', u'u'), (0x24E5, 'M', u'v'), @@ -2512,8 +2529,6 @@ def _seg_24(): (0x24E9, 'M', u'z'), (0x24EA, 'M', u'0'), (0x24EB, 'V'), - (0x2700, 'X'), - (0x2701, 'V'), (0x2A0C, 'M', u'∫∫∫∫'), (0x2A0D, 'V'), (0x2A74, '3', u'::='), @@ -2522,9 +2537,17 @@ def _seg_24(): (0x2A77, 'V'), (0x2ADC, 'M', u'⫝̸'), (0x2ADD, 'V'), - (0x2B4D, 'X'), - (0x2B50, 'V'), - (0x2B5A, 'X'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B98, 'V'), + (0x2BBA, 'X'), + (0x2BBD, 'V'), + (0x2BC9, 'X'), + (0x2BCA, 'V'), + (0x2BD3, 'X'), + (0x2BEC, 'V'), + (0x2BF0, 'X'), (0x2C00, 'M', u'ⰰ'), (0x2C01, 'M', u'ⰱ'), (0x2C02, 'M', u'ⰲ'), @@ -2580,6 +2603,10 @@ def _seg_24(): (0x2C62, 'M', u'ɫ'), (0x2C63, 'M', u'ᵽ'), (0x2C64, 'M', u'ɽ'), + ] + +def _seg_25(): + return [ (0x2C65, 'V'), (0x2C67, 'M', u'ⱨ'), (0x2C68, 'V'), @@ -2603,10 +2630,6 @@ def _seg_24(): (0x2C80, 'M', 
u'ⲁ'), (0x2C81, 'V'), (0x2C82, 'M', u'ⲃ'), - ] - -def _seg_25(): - return [ (0x2C83, 'V'), (0x2C84, 'M', u'ⲅ'), (0x2C85, 'V'), @@ -2684,6 +2707,10 @@ def _seg_25(): (0x2CCD, 'V'), (0x2CCE, 'M', u'ⳏ'), (0x2CCF, 'V'), + ] + +def _seg_26(): + return [ (0x2CD0, 'M', u'ⳑ'), (0x2CD1, 'V'), (0x2CD2, 'M', u'ⳓ'), @@ -2707,10 +2734,6 @@ def _seg_25(): (0x2CEB, 'M', u'ⳬ'), (0x2CEC, 'V'), (0x2CED, 'M', u'ⳮ'), - ] - -def _seg_26(): - return [ (0x2CEE, 'V'), (0x2CF2, 'M', u'ⳳ'), (0x2CF3, 'V'), @@ -2745,7 +2768,7 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E3C, 'X'), + (0x2E4A, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), @@ -2788,6 +2811,10 @@ def _seg_26(): (0x2F20, 'M', u'士'), (0x2F21, 'M', u'夂'), (0x2F22, 'M', u'夊'), + ] + +def _seg_27(): + return [ (0x2F23, 'M', u'夕'), (0x2F24, 'M', u'大'), (0x2F25, 'M', u'女'), @@ -2811,10 +2838,6 @@ def _seg_26(): (0x2F37, 'M', u'弋'), (0x2F38, 'M', u'弓'), (0x2F39, 'M', u'彐'), - ] - -def _seg_27(): - return [ (0x2F3A, 'M', u'彡'), (0x2F3B, 'M', u'彳'), (0x2F3C, 'M', u'心'), @@ -2892,6 +2915,10 @@ def _seg_27(): (0x2F84, 'M', u'至'), (0x2F85, 'M', u'臼'), (0x2F86, 'M', u'舌'), + ] + +def _seg_28(): + return [ (0x2F87, 'M', u'舛'), (0x2F88, 'M', u'舟'), (0x2F89, 'M', u'艮'), @@ -2915,10 +2942,6 @@ def _seg_27(): (0x2F9B, 'M', u'走'), (0x2F9C, 'M', u'足'), (0x2F9D, 'M', u'身'), - ] - -def _seg_28(): - return [ (0x2F9E, 'M', u'車'), (0x2F9F, 'M', u'辛'), (0x2FA0, 'M', u'辰'), @@ -2996,9 +3019,13 @@ def _seg_28(): (0x309F, 'M', u'より'), (0x30A0, 'V'), (0x30FF, 'M', u'コト'), + ] + +def _seg_29(): + return [ (0x3100, 'X'), (0x3105, 'V'), - (0x312E, 'X'), + (0x312F, 'X'), (0x3131, 'M', u'ᄀ'), (0x3132, 'M', u'ᄁ'), (0x3133, 'M', u'ᆪ'), @@ -3019,10 +3046,6 @@ def _seg_28(): (0x3142, 'M', u'ᄇ'), (0x3143, 'M', u'ᄈ'), (0x3144, 'M', u'ᄡ'), - ] - -def _seg_29(): - return [ (0x3145, 'M', u'ᄉ'), (0x3146, 'M', u'ᄊ'), (0x3147, 'M', u'ᄋ'), @@ -3100,6 +3123,10 @@ def _seg_29(): (0x318F, 'X'), (0x3190, 'V'), (0x3192, 'M', u'一'), + ] + +def _seg_30(): + return [ (0x3193, 'M', u'二'), (0x3194, 'M', u'三'), (0x3195, 'M', u'四'), @@ -3123,10 +3150,6 @@ def _seg_29(): (0x3202, '3', u'(ᄃ)'), (0x3203, '3', u'(ᄅ)'), (0x3204, '3', u'(ᄆ)'), - ] - -def _seg_30(): - return [ (0x3205, '3', u'(ᄇ)'), (0x3206, '3', u'(ᄉ)'), (0x3207, '3', u'(ᄋ)'), @@ -3204,6 +3227,10 @@ def _seg_30(): (0x3256, 'M', u'26'), (0x3257, 'M', u'27'), (0x3258, 'M', u'28'), + ] + +def _seg_31(): + return [ (0x3259, 'M', u'29'), (0x325A, 'M', u'30'), (0x325B, 'M', u'31'), @@ -3227,10 +3254,6 @@ def _seg_30(): (0x326D, 'M', u'ᄒ'), (0x326E, 'M', u'가'), (0x326F, 'M', u'나'), - ] - -def _seg_31(): - return [ (0x3270, 'M', u'다'), (0x3271, 'M', u'라'), (0x3272, 'M', u'마'), @@ -3308,6 +3331,10 @@ def _seg_31(): (0x32BA, 'M', u'45'), (0x32BB, 'M', u'46'), (0x32BC, 'M', u'47'), + ] + +def _seg_32(): + return [ (0x32BD, 'M', u'48'), (0x32BE, 'M', u'49'), (0x32BF, 'M', u'50'), @@ -3331,10 +3358,6 @@ def _seg_31(): (0x32D1, 'M', u'イ'), (0x32D2, 'M', u'ウ'), (0x32D3, 'M', u'エ'), - ] - -def _seg_32(): - return [ (0x32D4, 'M', u'オ'), (0x32D5, 'M', u'カ'), (0x32D6, 'M', u'キ'), @@ -3412,6 +3435,10 @@ def _seg_32(): (0x331E, 'M', u'コーポ'), (0x331F, 'M', u'サイクル'), (0x3320, 'M', u'サンチーム'), + ] + +def _seg_33(): + return [ (0x3321, 'M', u'シリング'), (0x3322, 'M', u'センチ'), (0x3323, 'M', u'セント'), @@ -3435,10 +3462,6 @@ def _seg_32(): (0x3335, 'M', u'フラン'), (0x3336, 'M', u'ヘクタール'), (0x3337, 'M', u'ペソ'), - ] - -def _seg_33(): - return [ (0x3338, 'M', u'ペニヒ'), (0x3339, 'M', u'ヘルツ'), (0x333A, 'M', u'ペンス'), @@ -3516,6 +3539,10 @@ def _seg_33(): (0x3382, 
'M', u'μa'), (0x3383, 'M', u'ma'), (0x3384, 'M', u'ka'), + ] + +def _seg_34(): + return [ (0x3385, 'M', u'kb'), (0x3386, 'M', u'mb'), (0x3387, 'M', u'gb'), @@ -3539,10 +3566,6 @@ def _seg_33(): (0x3399, 'M', u'fm'), (0x339A, 'M', u'nm'), (0x339B, 'M', u'μm'), - ] - -def _seg_34(): - return [ (0x339C, 'M', u'mm'), (0x339D, 'M', u'cm'), (0x339E, 'M', u'km'), @@ -3620,6 +3643,10 @@ def _seg_34(): (0x33E6, 'M', u'7日'), (0x33E7, 'M', u'8日'), (0x33E8, 'M', u'9日'), + ] + +def _seg_35(): + return [ (0x33E9, 'M', u'10日'), (0x33EA, 'M', u'11日'), (0x33EB, 'M', u'12日'), @@ -3643,14 +3670,10 @@ def _seg_34(): (0x33FD, 'M', u'30日'), (0x33FE, 'M', u'31日'), (0x33FF, 'M', u'gal'), - ] - -def _seg_35(): - return [ (0x3400, 'V'), (0x4DB6, 'X'), (0x4DC0, 'V'), - (0x9FCD, 'X'), + (0x9FEB, 'X'), (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), @@ -3724,11 +3747,20 @@ def _seg_35(): (0xA692, 'M', u'ꚓ'), (0xA693, 'V'), (0xA694, 'M', u'ꚕ'), + ] + +def _seg_36(): + return [ (0xA695, 'V'), (0xA696, 'M', u'ꚗ'), (0xA697, 'V'), - (0xA698, 'X'), - (0xA69F, 'V'), + (0xA698, 'M', u'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', u'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', u'ъ'), + (0xA69D, 'M', u'ь'), + (0xA69E, 'V'), (0xA6F8, 'X'), (0xA700, 'V'), (0xA722, 'M', u'ꜣ'), @@ -3747,10 +3779,6 @@ def _seg_35(): (0xA72F, 'V'), (0xA732, 'M', u'ꜳ'), (0xA733, 'V'), - ] - -def _seg_36(): - return [ (0xA734, 'M', u'ꜵ'), (0xA735, 'V'), (0xA736, 'M', u'ꜷ'), @@ -3823,6 +3851,10 @@ def _seg_36(): (0xA780, 'M', u'ꞁ'), (0xA781, 'V'), (0xA782, 'M', u'ꞃ'), + ] + +def _seg_37(): + return [ (0xA783, 'V'), (0xA784, 'M', u'ꞅ'), (0xA785, 'V'), @@ -3832,12 +3864,20 @@ def _seg_36(): (0xA78C, 'V'), (0xA78D, 'M', u'ɥ'), (0xA78E, 'V'), - (0xA78F, 'X'), (0xA790, 'M', u'ꞑ'), (0xA791, 'V'), (0xA792, 'M', u'ꞓ'), (0xA793, 'V'), - (0xA794, 'X'), + (0xA796, 'M', u'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', u'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', u'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', u'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', u'ꞟ'), + (0xA79F, 'V'), (0xA7A0, 'M', u'ꞡ'), (0xA7A1, 'V'), (0xA7A2, 'M', u'ꞣ'), @@ -3849,12 +3889,22 @@ def _seg_36(): (0xA7A8, 'M', u'ꞩ'), (0xA7A9, 'V'), (0xA7AA, 'M', u'ɦ'), - (0xA7AB, 'X'), + (0xA7AB, 'M', u'ɜ'), + (0xA7AC, 'M', u'ɡ'), + (0xA7AD, 'M', u'ɬ'), + (0xA7AE, 'M', u'ɪ'), + (0xA7AF, 'X'), + (0xA7B0, 'M', u'ʞ'), + (0xA7B1, 'M', u'ʇ'), + (0xA7B2, 'M', u'ʝ'), + (0xA7B3, 'M', u'ꭓ'), + (0xA7B4, 'M', u'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', u'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'X'), + (0xA7F7, 'V'), (0xA7F8, 'M', u'ħ'), - ] - -def _seg_37(): - return [ (0xA7F9, 'M', u'œ'), (0xA7FA, 'V'), (0xA82C, 'X'), @@ -3863,11 +3913,11 @@ def _seg_37(): (0xA840, 'V'), (0xA878, 'X'), (0xA880, 'V'), - (0xA8C5, 'X'), + (0xA8C6, 'X'), (0xA8CE, 'V'), (0xA8DA, 'X'), (0xA8E0, 'V'), - (0xA8FC, 'X'), + (0xA8FE, 'X'), (0xA900, 'V'), (0xA954, 'X'), (0xA95F, 'V'), @@ -3877,7 +3927,7 @@ def _seg_37(): (0xA9CF, 'V'), (0xA9DA, 'X'), (0xA9DE, 'V'), - (0xA9E0, 'X'), + (0xA9FF, 'X'), (0xAA00, 'V'), (0xAA37, 'X'), (0xAA40, 'V'), @@ -3885,8 +3935,6 @@ def _seg_37(): (0xAA50, 'V'), (0xAA5A, 'X'), (0xAA5C, 'V'), - (0xAA7C, 'X'), - (0xAA80, 'V'), (0xAAC3, 'X'), (0xAADB, 'V'), (0xAAF7, 'X'), @@ -3900,6 +3948,97 @@ def _seg_37(): (0xAB27, 'X'), (0xAB28, 'V'), (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', u'ꜧ'), + (0xAB5D, 'M', u'ꬷ'), + (0xAB5E, 'M', u'ɫ'), + (0xAB5F, 'M', u'ꭒ'), + (0xAB60, 'V'), + (0xAB66, 'X'), + ] + +def _seg_38(): + return [ + (0xAB70, 'M', u'Ꭰ'), + (0xAB71, 'M', u'Ꭱ'), + (0xAB72, 'M', u'Ꭲ'), + (0xAB73, 'M', u'Ꭳ'), + (0xAB74, 'M', u'Ꭴ'), + (0xAB75, 'M', u'Ꭵ'), + (0xAB76, 
'M', u'Ꭶ'), + (0xAB77, 'M', u'Ꭷ'), + (0xAB78, 'M', u'Ꭸ'), + (0xAB79, 'M', u'Ꭹ'), + (0xAB7A, 'M', u'Ꭺ'), + (0xAB7B, 'M', u'Ꭻ'), + (0xAB7C, 'M', u'Ꭼ'), + (0xAB7D, 'M', u'Ꭽ'), + (0xAB7E, 'M', u'Ꭾ'), + (0xAB7F, 'M', u'Ꭿ'), + (0xAB80, 'M', u'Ꮀ'), + (0xAB81, 'M', u'Ꮁ'), + (0xAB82, 'M', u'Ꮂ'), + (0xAB83, 'M', u'Ꮃ'), + (0xAB84, 'M', u'Ꮄ'), + (0xAB85, 'M', u'Ꮅ'), + (0xAB86, 'M', u'Ꮆ'), + (0xAB87, 'M', u'Ꮇ'), + (0xAB88, 'M', u'Ꮈ'), + (0xAB89, 'M', u'Ꮉ'), + (0xAB8A, 'M', u'Ꮊ'), + (0xAB8B, 'M', u'Ꮋ'), + (0xAB8C, 'M', u'Ꮌ'), + (0xAB8D, 'M', u'Ꮍ'), + (0xAB8E, 'M', u'Ꮎ'), + (0xAB8F, 'M', u'Ꮏ'), + (0xAB90, 'M', u'Ꮐ'), + (0xAB91, 'M', u'Ꮑ'), + (0xAB92, 'M', u'Ꮒ'), + (0xAB93, 'M', u'Ꮓ'), + (0xAB94, 'M', u'Ꮔ'), + (0xAB95, 'M', u'Ꮕ'), + (0xAB96, 'M', u'Ꮖ'), + (0xAB97, 'M', u'Ꮗ'), + (0xAB98, 'M', u'Ꮘ'), + (0xAB99, 'M', u'Ꮙ'), + (0xAB9A, 'M', u'Ꮚ'), + (0xAB9B, 'M', u'Ꮛ'), + (0xAB9C, 'M', u'Ꮜ'), + (0xAB9D, 'M', u'Ꮝ'), + (0xAB9E, 'M', u'Ꮞ'), + (0xAB9F, 'M', u'Ꮟ'), + (0xABA0, 'M', u'Ꮠ'), + (0xABA1, 'M', u'Ꮡ'), + (0xABA2, 'M', u'Ꮢ'), + (0xABA3, 'M', u'Ꮣ'), + (0xABA4, 'M', u'Ꮤ'), + (0xABA5, 'M', u'Ꮥ'), + (0xABA6, 'M', u'Ꮦ'), + (0xABA7, 'M', u'Ꮧ'), + (0xABA8, 'M', u'Ꮨ'), + (0xABA9, 'M', u'Ꮩ'), + (0xABAA, 'M', u'Ꮪ'), + (0xABAB, 'M', u'Ꮫ'), + (0xABAC, 'M', u'Ꮬ'), + (0xABAD, 'M', u'Ꮭ'), + (0xABAE, 'M', u'Ꮮ'), + (0xABAF, 'M', u'Ꮯ'), + (0xABB0, 'M', u'Ꮰ'), + (0xABB1, 'M', u'Ꮱ'), + (0xABB2, 'M', u'Ꮲ'), + (0xABB3, 'M', u'Ꮳ'), + (0xABB4, 'M', u'Ꮴ'), + (0xABB5, 'M', u'Ꮵ'), + (0xABB6, 'M', u'Ꮶ'), + (0xABB7, 'M', u'Ꮷ'), + (0xABB8, 'M', u'Ꮸ'), + (0xABB9, 'M', u'Ꮹ'), + (0xABBA, 'M', u'Ꮺ'), + (0xABBB, 'M', u'Ꮻ'), + (0xABBC, 'M', u'Ꮼ'), + (0xABBD, 'M', u'Ꮽ'), + (0xABBE, 'M', u'Ꮾ'), + (0xABBF, 'M', u'Ꮿ'), (0xABC0, 'V'), (0xABEE, 'X'), (0xABF0, 'V'), @@ -3920,6 +4059,10 @@ def _seg_37(): (0xF907, 'M', u'龜'), (0xF909, 'M', u'契'), (0xF90A, 'M', u'金'), + ] + +def _seg_39(): + return [ (0xF90B, 'M', u'喇'), (0xF90C, 'M', u'奈'), (0xF90D, 'M', u'懶'), @@ -3955,10 +4098,6 @@ def _seg_37(): (0xF92B, 'M', u'狼'), (0xF92C, 'M', u'郎'), (0xF92D, 'M', u'來'), - ] - -def _seg_38(): - return [ (0xF92E, 'M', u'冷'), (0xF92F, 'M', u'勞'), (0xF930, 'M', u'擄'), @@ -4024,6 +4163,10 @@ def _seg_38(): (0xF96C, 'M', u'塞'), (0xF96D, 'M', u'省'), (0xF96E, 'M', u'葉'), + ] + +def _seg_40(): + return [ (0xF96F, 'M', u'說'), (0xF970, 'M', u'殺'), (0xF971, 'M', u'辰'), @@ -4059,10 +4202,6 @@ def _seg_38(): (0xF98F, 'M', u'憐'), (0xF990, 'M', u'戀'), (0xF991, 'M', u'撚'), - ] - -def _seg_39(): - return [ (0xF992, 'M', u'漣'), (0xF993, 'M', u'煉'), (0xF994, 'M', u'璉'), @@ -4128,6 +4267,10 @@ def _seg_39(): (0xF9D0, 'M', u'類'), (0xF9D1, 'M', u'六'), (0xF9D2, 'M', u'戮'), + ] + +def _seg_41(): + return [ (0xF9D3, 'M', u'陸'), (0xF9D4, 'M', u'倫'), (0xF9D5, 'M', u'崙'), @@ -4163,10 +4306,6 @@ def _seg_39(): (0xF9F3, 'M', u'麟'), (0xF9F4, 'M', u'林'), (0xF9F5, 'M', u'淋'), - ] - -def _seg_40(): - return [ (0xF9F6, 'M', u'臨'), (0xF9F7, 'M', u'立'), (0xF9F8, 'M', u'笠'), @@ -4232,6 +4371,10 @@ def _seg_40(): (0xFA39, 'M', u'塀'), (0xFA3A, 'M', u'墨'), (0xFA3B, 'M', u'層'), + ] + +def _seg_42(): + return [ (0xFA3C, 'M', u'屮'), (0xFA3D, 'M', u'悔'), (0xFA3E, 'M', u'慨'), @@ -4267,10 +4410,6 @@ def _seg_40(): (0xFA5C, 'M', u'臭'), (0xFA5D, 'M', u'艹'), (0xFA5F, 'M', u'著'), - ] - -def _seg_41(): - return [ (0xFA60, 'M', u'褐'), (0xFA61, 'M', u'視'), (0xFA62, 'M', u'謁'), @@ -4336,6 +4475,10 @@ def _seg_41(): (0xFA9F, 'M', u'犯'), (0xFAA0, 'M', u'猪'), (0xFAA1, 'M', u'瑱'), + ] + +def _seg_43(): + return [ (0xFAA2, 'M', u'甆'), (0xFAA3, 'M', u'画'), (0xFAA4, 'M', u'瘝'), @@ -4371,10 +4514,6 @@ def _seg_41(): 
(0xFAC2, 'M', u'輸'), (0xFAC3, 'M', u'遲'), (0xFAC4, 'M', u'醙'), - ] - -def _seg_42(): - return [ (0xFAC5, 'M', u'鉶'), (0xFAC6, 'M', u'陼'), (0xFAC7, 'M', u'難'), @@ -4440,6 +4579,10 @@ def _seg_42(): (0xFB38, 'M', u'טּ'), (0xFB39, 'M', u'יּ'), (0xFB3A, 'M', u'ךּ'), + ] + +def _seg_44(): + return [ (0xFB3B, 'M', u'כּ'), (0xFB3C, 'M', u'לּ'), (0xFB3D, 'X'), @@ -4475,10 +4618,6 @@ def _seg_42(): (0xFB7A, 'M', u'چ'), (0xFB7E, 'M', u'ڇ'), (0xFB82, 'M', u'ڍ'), - ] - -def _seg_43(): - return [ (0xFB84, 'M', u'ڌ'), (0xFB86, 'M', u'ڎ'), (0xFB88, 'M', u'ڈ'), @@ -4544,6 +4683,10 @@ def _seg_43(): (0xFC19, 'M', u'خج'), (0xFC1A, 'M', u'خح'), (0xFC1B, 'M', u'خم'), + ] + +def _seg_45(): + return [ (0xFC1C, 'M', u'سج'), (0xFC1D, 'M', u'سح'), (0xFC1E, 'M', u'سخ'), @@ -4579,10 +4722,6 @@ def _seg_43(): (0xFC3C, 'M', u'كم'), (0xFC3D, 'M', u'كى'), (0xFC3E, 'M', u'كي'), - ] - -def _seg_44(): - return [ (0xFC3F, 'M', u'لج'), (0xFC40, 'M', u'لح'), (0xFC41, 'M', u'لخ'), @@ -4648,6 +4787,10 @@ def _seg_44(): (0xFC7D, 'M', u'في'), (0xFC7E, 'M', u'قى'), (0xFC7F, 'M', u'قي'), + ] + +def _seg_46(): + return [ (0xFC80, 'M', u'كا'), (0xFC81, 'M', u'كل'), (0xFC82, 'M', u'كم'), @@ -4683,10 +4826,6 @@ def _seg_44(): (0xFCA0, 'M', u'به'), (0xFCA1, 'M', u'تج'), (0xFCA2, 'M', u'تح'), - ] - -def _seg_45(): - return [ (0xFCA3, 'M', u'تخ'), (0xFCA4, 'M', u'تم'), (0xFCA5, 'M', u'ته'), @@ -4752,6 +4891,10 @@ def _seg_45(): (0xFCE1, 'M', u'بم'), (0xFCE2, 'M', u'به'), (0xFCE3, 'M', u'تم'), + ] + +def _seg_47(): + return [ (0xFCE4, 'M', u'ته'), (0xFCE5, 'M', u'ثم'), (0xFCE6, 'M', u'ثه'), @@ -4787,10 +4930,6 @@ def _seg_45(): (0xFD04, 'M', u'خي'), (0xFD05, 'M', u'صى'), (0xFD06, 'M', u'صي'), - ] - -def _seg_46(): - return [ (0xFD07, 'M', u'ضى'), (0xFD08, 'M', u'ضي'), (0xFD09, 'M', u'شج'), @@ -4856,6 +4995,10 @@ def _seg_46(): (0xFD57, 'M', u'تمخ'), (0xFD58, 'M', u'جمح'), (0xFD5A, 'M', u'حمي'), + ] + +def _seg_48(): + return [ (0xFD5B, 'M', u'حمى'), (0xFD5C, 'M', u'سحج'), (0xFD5D, 'M', u'سجح'), @@ -4891,10 +5034,6 @@ def _seg_46(): (0xFD87, 'M', u'لمح'), (0xFD89, 'M', u'محج'), (0xFD8A, 'M', u'محم'), - ] - -def _seg_47(): - return [ (0xFD8B, 'M', u'محي'), (0xFD8C, 'M', u'مجح'), (0xFD8D, 'M', u'مجم'), @@ -4960,6 +5099,10 @@ def _seg_47(): (0xFDF3, 'M', u'اكبر'), (0xFDF4, 'M', u'محمد'), (0xFDF5, 'M', u'صلعم'), + ] + +def _seg_49(): + return [ (0xFDF6, 'M', u'رسول'), (0xFDF7, 'M', u'عليه'), (0xFDF8, 'M', u'وسلم'), @@ -4981,7 +5124,7 @@ def _seg_47(): (0xFE18, 'M', u'〗'), (0xFE19, 'X'), (0xFE20, 'V'), - (0xFE27, 'X'), + (0xFE30, 'X'), (0xFE31, 'M', u'—'), (0xFE32, 'M', u'–'), (0xFE33, '3', u'_'), @@ -4995,10 +5138,6 @@ def _seg_47(): (0xFE3C, 'M', u'】'), (0xFE3D, 'M', u'《'), (0xFE3E, 'M', u'》'), - ] - -def _seg_48(): - return [ (0xFE3F, 'M', u'〈'), (0xFE40, 'M', u'〉'), (0xFE41, 'M', u'「'), @@ -5064,6 +5203,10 @@ def _seg_48(): (0xFE8F, 'M', u'ب'), (0xFE93, 'M', u'ة'), (0xFE95, 'M', u'ت'), + ] + +def _seg_50(): + return [ (0xFE99, 'M', u'ث'), (0xFE9D, 'M', u'ج'), (0xFEA1, 'M', u'ح'), @@ -5099,10 +5242,6 @@ def _seg_48(): (0xFF00, 'X'), (0xFF01, '3', u'!'), (0xFF02, '3', u'"'), - ] - -def _seg_49(): - return [ (0xFF03, '3', u'#'), (0xFF04, '3', u'$'), (0xFF05, '3', u'%'), @@ -5168,6 +5307,10 @@ def _seg_49(): (0xFF41, 'M', u'a'), (0xFF42, 'M', u'b'), (0xFF43, 'M', u'c'), + ] + +def _seg_51(): + return [ (0xFF44, 'M', u'd'), (0xFF45, 'M', u'e'), (0xFF46, 'M', u'f'), @@ -5203,10 +5346,6 @@ def _seg_49(): (0xFF64, 'M', u'、'), (0xFF65, 'M', u'・'), (0xFF66, 'M', u'ヲ'), - ] - -def _seg_50(): - return [ (0xFF67, 'M', u'ァ'), (0xFF68, 'M', u'ィ'), (0xFF69, 
'M', u'ゥ'), @@ -5272,6 +5411,10 @@ def _seg_50(): (0xFFA5, 'M', u'ᆬ'), (0xFFA6, 'M', u'ᆭ'), (0xFFA7, 'M', u'ᄃ'), + ] + +def _seg_52(): + return [ (0xFFA8, 'M', u'ᄄ'), (0xFFA9, 'M', u'ᄅ'), (0xFFAA, 'M', u'ᆰ'), @@ -5307,10 +5450,6 @@ def _seg_50(): (0xFFCB, 'M', u'ᅨ'), (0xFFCC, 'M', u'ᅩ'), (0xFFCD, 'M', u'ᅪ'), - ] - -def _seg_51(): - return [ (0xFFCE, 'M', u'ᅫ'), (0xFFCF, 'M', u'ᅬ'), (0xFFD0, 'X'), @@ -5360,21 +5499,29 @@ def _seg_51(): (0x10107, 'V'), (0x10134, 'X'), (0x10137, 'V'), - (0x1018B, 'X'), + (0x1018F, 'X'), (0x10190, 'V'), (0x1019C, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), (0x101D0, 'V'), (0x101FE, 'X'), (0x10280, 'V'), (0x1029D, 'X'), (0x102A0, 'V'), (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), (0x10300, 'V'), - (0x1031F, 'X'), - (0x10320, 'V'), (0x10324, 'X'), - (0x10330, 'V'), + (0x1032D, 'V'), + ] + +def _seg_53(): + return [ (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), (0x10380, 'V'), (0x1039E, 'X'), (0x1039F, 'V'), @@ -5411,10 +5558,6 @@ def _seg_51(): (0x1041B, 'M', u'𐑃'), (0x1041C, 'M', u'𐑄'), (0x1041D, 'M', u'𐑅'), - ] - -def _seg_52(): - return [ (0x1041E, 'M', u'𐑆'), (0x1041F, 'M', u'𐑇'), (0x10420, 'M', u'𐑈'), @@ -5429,6 +5572,61 @@ def _seg_52(): (0x1049E, 'X'), (0x104A0, 'V'), (0x104AA, 'X'), + (0x104B0, 'M', u'𐓘'), + (0x104B1, 'M', u'𐓙'), + (0x104B2, 'M', u'𐓚'), + (0x104B3, 'M', u'𐓛'), + (0x104B4, 'M', u'𐓜'), + (0x104B5, 'M', u'𐓝'), + (0x104B6, 'M', u'𐓞'), + (0x104B7, 'M', u'𐓟'), + (0x104B8, 'M', u'𐓠'), + (0x104B9, 'M', u'𐓡'), + (0x104BA, 'M', u'𐓢'), + (0x104BB, 'M', u'𐓣'), + (0x104BC, 'M', u'𐓤'), + (0x104BD, 'M', u'𐓥'), + (0x104BE, 'M', u'𐓦'), + (0x104BF, 'M', u'𐓧'), + (0x104C0, 'M', u'𐓨'), + (0x104C1, 'M', u'𐓩'), + (0x104C2, 'M', u'𐓪'), + (0x104C3, 'M', u'𐓫'), + (0x104C4, 'M', u'𐓬'), + (0x104C5, 'M', u'𐓭'), + (0x104C6, 'M', u'𐓮'), + (0x104C7, 'M', u'𐓯'), + (0x104C8, 'M', u'𐓰'), + (0x104C9, 'M', u'𐓱'), + (0x104CA, 'M', u'𐓲'), + (0x104CB, 'M', u'𐓳'), + (0x104CC, 'M', u'𐓴'), + (0x104CD, 'M', u'𐓵'), + (0x104CE, 'M', u'𐓶'), + (0x104CF, 'M', u'𐓷'), + (0x104D0, 'M', u'𐓸'), + (0x104D1, 'M', u'𐓹'), + (0x104D2, 'M', u'𐓺'), + (0x104D3, 'M', u'𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + ] + +def _seg_54(): + return [ + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), (0x10800, 'V'), (0x10806, 'X'), (0x10808, 'V'), @@ -5442,8 +5640,14 @@ def _seg_52(): (0x1083F, 'V'), (0x10856, 'X'), (0x10857, 'V'), - (0x10860, 'X'), - (0x10900, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), (0x1091C, 'X'), (0x1091F, 'V'), (0x1093A, 'X'), @@ -5451,9 +5655,9 @@ def _seg_52(): (0x10940, 'X'), (0x10980, 'V'), (0x109B8, 'X'), - (0x109BE, 'V'), - (0x109C0, 'X'), - (0x10A00, 'V'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), (0x10A04, 'X'), (0x10A05, 'V'), (0x10A07, 'X'), @@ -5470,7 +5674,11 @@ def _seg_52(): (0x10A50, 'V'), (0x10A59, 'X'), (0x10A60, 'V'), - (0x10A80, 'X'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), (0x10B00, 'V'), (0x10B36, 'X'), (0x10B39, 'V'), @@ -5478,16 +5686,80 @@ def _seg_52(): (0x10B58, 'V'), (0x10B73, 'X'), (0x10B78, 'V'), - (0x10B80, 'X'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), (0x10C00, 'V'), (0x10C49, 'X'), + (0x10C80, 'M', u'𐳀'), + (0x10C81, 'M', u'𐳁'), + (0x10C82, 'M', 
u'𐳂'), + (0x10C83, 'M', u'𐳃'), + (0x10C84, 'M', u'𐳄'), + (0x10C85, 'M', u'𐳅'), + (0x10C86, 'M', u'𐳆'), + (0x10C87, 'M', u'𐳇'), + (0x10C88, 'M', u'𐳈'), + (0x10C89, 'M', u'𐳉'), + (0x10C8A, 'M', u'𐳊'), + (0x10C8B, 'M', u'𐳋'), + (0x10C8C, 'M', u'𐳌'), + (0x10C8D, 'M', u'𐳍'), + (0x10C8E, 'M', u'𐳎'), + (0x10C8F, 'M', u'𐳏'), + (0x10C90, 'M', u'𐳐'), + (0x10C91, 'M', u'𐳑'), + (0x10C92, 'M', u'𐳒'), + (0x10C93, 'M', u'𐳓'), + (0x10C94, 'M', u'𐳔'), + (0x10C95, 'M', u'𐳕'), + (0x10C96, 'M', u'𐳖'), + (0x10C97, 'M', u'𐳗'), + (0x10C98, 'M', u'𐳘'), + (0x10C99, 'M', u'𐳙'), + (0x10C9A, 'M', u'𐳚'), + (0x10C9B, 'M', u'𐳛'), + (0x10C9C, 'M', u'𐳜'), + (0x10C9D, 'M', u'𐳝'), + ] + +def _seg_55(): + return [ + (0x10C9E, 'M', u'𐳞'), + (0x10C9F, 'M', u'𐳟'), + (0x10CA0, 'M', u'𐳠'), + (0x10CA1, 'M', u'𐳡'), + (0x10CA2, 'M', u'𐳢'), + (0x10CA3, 'M', u'𐳣'), + (0x10CA4, 'M', u'𐳤'), + (0x10CA5, 'M', u'𐳥'), + (0x10CA6, 'M', u'𐳦'), + (0x10CA7, 'M', u'𐳧'), + (0x10CA8, 'M', u'𐳨'), + (0x10CA9, 'M', u'𐳩'), + (0x10CAA, 'M', u'𐳪'), + (0x10CAB, 'M', u'𐳫'), + (0x10CAC, 'M', u'𐳬'), + (0x10CAD, 'M', u'𐳭'), + (0x10CAE, 'M', u'𐳮'), + (0x10CAF, 'M', u'𐳯'), + (0x10CB0, 'M', u'𐳰'), + (0x10CB1, 'M', u'𐳱'), + (0x10CB2, 'M', u'𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D00, 'X'), (0x10E60, 'V'), (0x10E7F, 'X'), (0x11000, 'V'), (0x1104E, 'X'), (0x11052, 'V'), (0x11070, 'X'), - (0x11080, 'V'), + (0x1107F, 'V'), (0x110BD, 'X'), (0x110BE, 'V'), (0x110C2, 'X'), @@ -5499,36 +5771,235 @@ def _seg_52(): (0x11135, 'X'), (0x11136, 'V'), (0x11144, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), (0x11180, 'V'), - (0x111C9, 'X'), + (0x111CE, 'X'), (0x111D0, 'V'), - (0x111DA, 'X'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133C, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + ] + +def _seg_56(): + return [ + (0x11400, 'V'), + (0x1145A, 'X'), + (0x1145B, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x1145E, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), (0x11680, 'V'), (0x116B8, 'X'), (0x116C0, 'V'), (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171A, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11740, 'X'), + (0x118A0, 'M', u'𑣀'), + (0x118A1, 'M', u'𑣁'), + (0x118A2, 'M', u'𑣂'), + (0x118A3, 'M', u'𑣃'), + (0x118A4, 'M', u'𑣄'), + (0x118A5, 'M', u'𑣅'), + (0x118A6, 'M', u'𑣆'), + (0x118A7, 'M', u'𑣇'), + (0x118A8, 'M', u'𑣈'), + (0x118A9, 'M', u'𑣉'), + (0x118AA, 'M', u'𑣊'), + (0x118AB, 'M', u'𑣋'), + (0x118AC, 'M', u'𑣌'), + (0x118AD, 'M', u'𑣍'), + (0x118AE, 'M', u'𑣎'), + (0x118AF, 'M', u'𑣏'), + (0x118B0, 
'M', u'𑣐'), + (0x118B1, 'M', u'𑣑'), + (0x118B2, 'M', u'𑣒'), + (0x118B3, 'M', u'𑣓'), + (0x118B4, 'M', u'𑣔'), + (0x118B5, 'M', u'𑣕'), + (0x118B6, 'M', u'𑣖'), + (0x118B7, 'M', u'𑣗'), + (0x118B8, 'M', u'𑣘'), + (0x118B9, 'M', u'𑣙'), + (0x118BA, 'M', u'𑣚'), + (0x118BB, 'M', u'𑣛'), + (0x118BC, 'M', u'𑣜'), + (0x118BD, 'M', u'𑣝'), + (0x118BE, 'M', u'𑣞'), + (0x118BF, 'M', u'𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11900, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11A84, 'X'), + (0x11A86, 'V'), + (0x11A9D, 'X'), + (0x11A9E, 'V'), + (0x11AA3, 'X'), + (0x11AC0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + ] + +def _seg_57(): + return [ + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), (0x12000, 'V'), - (0x1236F, 'X'), + (0x1239A, 'X'), (0x12400, 'V'), - (0x12463, 'X'), + (0x1246F, 'X'), (0x12470, 'V'), - (0x12474, 'X'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), (0x13000, 'V'), (0x1342F, 'X'), - ] - -def _seg_53(): - return [ + (0x14400, 'V'), + (0x14647, 'X'), (0x16800, 'V'), (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16A70, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), (0x16F00, 'V'), (0x16F45, 'X'), (0x16F50, 'V'), (0x16F7F, 'X'), (0x16F8F, 'V'), (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE2, 'X'), + (0x17000, 'V'), + (0x187ED, 'X'), + (0x18800, 'V'), + (0x18AF3, 'X'), (0x1B000, 'V'), - (0x1B002, 'X'), + (0x1B11F, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), (0x1D000, 'V'), (0x1D0F6, 'X'), (0x1D100, 'V'), @@ -5551,7 +6022,7 @@ def _seg_53(): (0x1D1BF, 'M', u'𝆹𝅥𝅯'), (0x1D1C0, 'M', u'𝆺𝅥𝅯'), (0x1D1C1, 'V'), - (0x1D1DE, 'X'), + (0x1D1E9, 'X'), (0x1D200, 'V'), (0x1D246, 'X'), (0x1D300, 'V'), @@ -5564,6 +6035,10 @@ def _seg_53(): (0x1D403, 'M', u'd'), (0x1D404, 'M', u'e'), (0x1D405, 'M', u'f'), + ] + +def _seg_58(): + return [ (0x1D406, 'M', u'g'), (0x1D407, 'M', u'h'), (0x1D408, 'M', u'i'), @@ -5619,10 +6094,6 @@ def _seg_53(): (0x1D43A, 'M', u'g'), (0x1D43B, 'M', u'h'), (0x1D43C, 'M', u'i'), - ] - -def _seg_54(): - return [ (0x1D43D, 'M', u'j'), (0x1D43E, 'M', u'k'), (0x1D43F, 'M', u'l'), @@ -5668,6 +6139,10 @@ def _seg_54(): (0x1D467, 'M', u'z'), (0x1D468, 'M', u'a'), (0x1D469, 'M', u'b'), + ] + +def _seg_59(): + return [ (0x1D46A, 'M', u'c'), (0x1D46B, 'M', u'd'), (0x1D46C, 'M', u'e'), @@ -5723,10 +6198,6 @@ def _seg_54(): (0x1D49E, 'M', u'c'), (0x1D49F, 'M', u'd'), (0x1D4A0, 'X'), - ] - -def _seg_55(): - return [ (0x1D4A2, 'M', u'g'), (0x1D4A3, 'X'), (0x1D4A5, 'M', u'j'), @@ -5772,6 +6243,10 @@ def _seg_55(): (0x1D4CE, 'M', u'y'), (0x1D4CF, 'M', u'z'), (0x1D4D0, 'M', u'a'), + ] + +def _seg_60(): + return [ (0x1D4D1, 'M', u'b'), (0x1D4D2, 'M', u'c'), 
(0x1D4D3, 'M', u'd'), @@ -5827,10 +6302,6 @@ def _seg_55(): (0x1D505, 'M', u'b'), (0x1D506, 'X'), (0x1D507, 'M', u'd'), - ] - -def _seg_56(): - return [ (0x1D508, 'M', u'e'), (0x1D509, 'M', u'f'), (0x1D50A, 'M', u'g'), @@ -5876,6 +6347,10 @@ def _seg_56(): (0x1D533, 'M', u'v'), (0x1D534, 'M', u'w'), (0x1D535, 'M', u'x'), + ] + +def _seg_61(): + return [ (0x1D536, 'M', u'y'), (0x1D537, 'M', u'z'), (0x1D538, 'M', u'a'), @@ -5931,10 +6406,6 @@ def _seg_56(): (0x1D56C, 'M', u'a'), (0x1D56D, 'M', u'b'), (0x1D56E, 'M', u'c'), - ] - -def _seg_57(): - return [ (0x1D56F, 'M', u'd'), (0x1D570, 'M', u'e'), (0x1D571, 'M', u'f'), @@ -5980,6 +6451,10 @@ def _seg_57(): (0x1D599, 'M', u't'), (0x1D59A, 'M', u'u'), (0x1D59B, 'M', u'v'), + ] + +def _seg_62(): + return [ (0x1D59C, 'M', u'w'), (0x1D59D, 'M', u'x'), (0x1D59E, 'M', u'y'), @@ -6035,10 +6510,6 @@ def _seg_57(): (0x1D5D0, 'M', u'w'), (0x1D5D1, 'M', u'x'), (0x1D5D2, 'M', u'y'), - ] - -def _seg_58(): - return [ (0x1D5D3, 'M', u'z'), (0x1D5D4, 'M', u'a'), (0x1D5D5, 'M', u'b'), @@ -6084,6 +6555,10 @@ def _seg_58(): (0x1D5FD, 'M', u'p'), (0x1D5FE, 'M', u'q'), (0x1D5FF, 'M', u'r'), + ] + +def _seg_63(): + return [ (0x1D600, 'M', u's'), (0x1D601, 'M', u't'), (0x1D602, 'M', u'u'), @@ -6139,10 +6614,6 @@ def _seg_58(): (0x1D634, 'M', u's'), (0x1D635, 'M', u't'), (0x1D636, 'M', u'u'), - ] - -def _seg_59(): - return [ (0x1D637, 'M', u'v'), (0x1D638, 'M', u'w'), (0x1D639, 'M', u'x'), @@ -6188,6 +6659,10 @@ def _seg_59(): (0x1D661, 'M', u'l'), (0x1D662, 'M', u'm'), (0x1D663, 'M', u'n'), + ] + +def _seg_64(): + return [ (0x1D664, 'M', u'o'), (0x1D665, 'M', u'p'), (0x1D666, 'M', u'q'), @@ -6243,10 +6718,6 @@ def _seg_59(): (0x1D698, 'M', u'o'), (0x1D699, 'M', u'p'), (0x1D69A, 'M', u'q'), - ] - -def _seg_60(): - return [ (0x1D69B, 'M', u'r'), (0x1D69C, 'M', u's'), (0x1D69D, 'M', u't'), @@ -6292,6 +6763,10 @@ def _seg_60(): (0x1D6C6, 'M', u'ε'), (0x1D6C7, 'M', u'ζ'), (0x1D6C8, 'M', u'η'), + ] + +def _seg_65(): + return [ (0x1D6C9, 'M', u'θ'), (0x1D6CA, 'M', u'ι'), (0x1D6CB, 'M', u'κ'), @@ -6347,10 +6822,6 @@ def _seg_60(): (0x1D6FE, 'M', u'γ'), (0x1D6FF, 'M', u'δ'), (0x1D700, 'M', u'ε'), - ] - -def _seg_61(): - return [ (0x1D701, 'M', u'ζ'), (0x1D702, 'M', u'η'), (0x1D703, 'M', u'θ'), @@ -6396,6 +6867,10 @@ def _seg_61(): (0x1D72C, 'M', u'ρ'), (0x1D72D, 'M', u'θ'), (0x1D72E, 'M', u'σ'), + ] + +def _seg_66(): + return [ (0x1D72F, 'M', u'τ'), (0x1D730, 'M', u'υ'), (0x1D731, 'M', u'φ'), @@ -6451,10 +6926,6 @@ def _seg_61(): (0x1D764, 'M', u'ο'), (0x1D765, 'M', u'π'), (0x1D766, 'M', u'ρ'), - ] - -def _seg_62(): - return [ (0x1D767, 'M', u'θ'), (0x1D768, 'M', u'σ'), (0x1D769, 'M', u'τ'), @@ -6500,6 +6971,10 @@ def _seg_62(): (0x1D792, 'M', u'γ'), (0x1D793, 'M', u'δ'), (0x1D794, 'M', u'ε'), + ] + +def _seg_67(): + return [ (0x1D795, 'M', u'ζ'), (0x1D796, 'M', u'η'), (0x1D797, 'M', u'θ'), @@ -6555,10 +7030,6 @@ def _seg_62(): (0x1D7CA, 'M', u'ϝ'), (0x1D7CC, 'X'), (0x1D7CE, 'M', u'0'), - ] - -def _seg_63(): - return [ (0x1D7CF, 'M', u'1'), (0x1D7D0, 'M', u'2'), (0x1D7D1, 'M', u'3'), @@ -6604,11 +7075,74 @@ def _seg_63(): (0x1D7F9, 'M', u'3'), (0x1D7FA, 'M', u'4'), (0x1D7FB, 'M', u'5'), + ] + +def _seg_68(): + return [ (0x1D7FC, 'M', u'6'), (0x1D7FD, 'M', u'7'), (0x1D7FE, 'M', u'8'), (0x1D7FF, 'M', u'9'), - (0x1D800, 'X'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + 
(0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', u'𞤢'), + (0x1E901, 'M', u'𞤣'), + (0x1E902, 'M', u'𞤤'), + (0x1E903, 'M', u'𞤥'), + (0x1E904, 'M', u'𞤦'), + (0x1E905, 'M', u'𞤧'), + (0x1E906, 'M', u'𞤨'), + (0x1E907, 'M', u'𞤩'), + (0x1E908, 'M', u'𞤪'), + (0x1E909, 'M', u'𞤫'), + (0x1E90A, 'M', u'𞤬'), + (0x1E90B, 'M', u'𞤭'), + (0x1E90C, 'M', u'𞤮'), + (0x1E90D, 'M', u'𞤯'), + (0x1E90E, 'M', u'𞤰'), + (0x1E90F, 'M', u'𞤱'), + (0x1E910, 'M', u'𞤲'), + (0x1E911, 'M', u'𞤳'), + (0x1E912, 'M', u'𞤴'), + (0x1E913, 'M', u'𞤵'), + (0x1E914, 'M', u'𞤶'), + (0x1E915, 'M', u'𞤷'), + (0x1E916, 'M', u'𞤸'), + (0x1E917, 'M', u'𞤹'), + (0x1E918, 'M', u'𞤺'), + (0x1E919, 'M', u'𞤻'), + (0x1E91A, 'M', u'𞤼'), + (0x1E91B, 'M', u'𞤽'), + (0x1E91C, 'M', u'𞤾'), + (0x1E91D, 'M', u'𞤿'), + (0x1E91E, 'M', u'𞥀'), + (0x1E91F, 'M', u'𞥁'), + (0x1E920, 'M', u'𞥂'), + (0x1E921, 'M', u'𞥃'), + (0x1E922, 'V'), + (0x1E94B, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), (0x1EE00, 'M', u'ا'), (0x1EE01, 'M', u'ب'), (0x1EE02, 'M', u'ج'), @@ -6645,6 +7179,10 @@ def _seg_63(): (0x1EE21, 'M', u'ب'), (0x1EE22, 'M', u'ج'), (0x1EE23, 'X'), + ] + +def _seg_69(): + return [ (0x1EE24, 'M', u'ه'), (0x1EE25, 'X'), (0x1EE27, 'M', u'ح'), @@ -6659,10 +7197,6 @@ def _seg_63(): (0x1EE30, 'M', u'ف'), (0x1EE31, 'M', u'ص'), (0x1EE32, 'M', u'ق'), - ] - -def _seg_64(): - return [ (0x1EE33, 'X'), (0x1EE34, 'M', u'ش'), (0x1EE35, 'M', u'ت'), @@ -6749,6 +7283,10 @@ def _seg_64(): (0x1EE90, 'M', u'ف'), (0x1EE91, 'M', u'ص'), (0x1EE92, 'M', u'ق'), + ] + +def _seg_70(): + return [ (0x1EE93, 'M', u'ر'), (0x1EE94, 'M', u'ش'), (0x1EE95, 'M', u'ت'), @@ -6763,10 +7301,6 @@ def _seg_64(): (0x1EEA2, 'M', u'ج'), (0x1EEA3, 'M', u'د'), (0x1EEA4, 'X'), - ] - -def _seg_65(): - return [ (0x1EEA5, 'M', u'و'), (0x1EEA6, 'M', u'ز'), (0x1EEA7, 'M', u'ح'), @@ -6800,11 +7334,11 @@ def _seg_65(): (0x1F0A0, 'V'), (0x1F0AF, 'X'), (0x1F0B1, 'V'), - (0x1F0BF, 'X'), + (0x1F0C0, 'X'), (0x1F0C1, 'V'), (0x1F0D0, 'X'), (0x1F0D1, 'V'), - (0x1F0E0, 'X'), + (0x1F0F6, 'X'), (0x1F101, '3', u'0,'), (0x1F102, '3', u'1,'), (0x1F103, '3', u'2,'), @@ -6815,7 +7349,8 @@ def _seg_65(): (0x1F108, '3', u'7,'), (0x1F109, '3', u'8,'), (0x1F10A, '3', u'9,'), - (0x1F10B, 'X'), + (0x1F10B, 'V'), + (0x1F10D, 'X'), (0x1F110, '3', u'(a)'), (0x1F111, '3', u'(b)'), (0x1F112, '3', u'(c)'), @@ -6852,6 +7387,10 @@ def _seg_65(): (0x1F131, 'M', u'b'), (0x1F132, 'M', u'c'), (0x1F133, 'M', u'd'), + ] + +def _seg_71(): + return [ (0x1F134, 'M', u'e'), (0x1F135, 'M', u'f'), (0x1F136, 'M', u'g'), @@ -6867,10 +7406,6 @@ def _seg_65(): (0x1F140, 'M', u'q'), (0x1F141, 'M', u'r'), (0x1F142, 'M', u's'), - ] - -def _seg_66(): - return [ (0x1F143, 'M', u't'), (0x1F144, 'M', u'u'), (0x1F145, 'M', u'v'), @@ -6891,7 +7426,7 @@ def _seg_66(): (0x1F170, 'V'), (0x1F190, 'M', u'dj'), (0x1F191, 'V'), - (0x1F19B, 'X'), + (0x1F1AD, 'X'), (0x1F1E6, 'V'), (0x1F200, 'M', u'ほか'), (0x1F201, 'M', u'ココ'), @@ -6940,7 +7475,8 @@ def _seg_66(): (0x1F238, 'M', u'申'), (0x1F239, 'M', u'割'), (0x1F23A, 'M', u'営'), - (0x1F23B, 'X'), + (0x1F23B, 'M', u'配'), + (0x1F23C, 'X'), (0x1F240, 'M', u'〔本〕'), (0x1F241, 'M', u'〔三〕'), (0x1F242, 'M', u'〔二〕'), @@ -6954,52 +7490,56 @@ def _seg_66(): (0x1F250, 'M', u'得'), (0x1F251, 'M', u'可'), (0x1F252, 'X'), - (0x1F300, 'V'), - (0x1F321, 'X'), - (0x1F330, 'V'), - (0x1F336, 'X'), - (0x1F337, 'V'), - (0x1F37D, 'X'), - (0x1F380, 'V'), - (0x1F394, 'X'), - (0x1F3A0, 'V'), - (0x1F3C5, 'X'), - (0x1F3C6, 'V'), - (0x1F3CB, 'X'), - (0x1F3E0, 
'V'), - (0x1F3F1, 'X'), - (0x1F400, 'V'), - (0x1F43F, 'X'), - (0x1F440, 'V'), + (0x1F260, 'V'), ] -def _seg_67(): +def _seg_72(): return [ - (0x1F441, 'X'), - (0x1F442, 'V'), - (0x1F4F8, 'X'), - (0x1F4F9, 'V'), - (0x1F4FD, 'X'), - (0x1F500, 'V'), - (0x1F53E, 'X'), - (0x1F540, 'V'), - (0x1F544, 'X'), - (0x1F550, 'V'), - (0x1F568, 'X'), - (0x1F5FB, 'V'), - (0x1F641, 'X'), - (0x1F645, 'V'), - (0x1F650, 'X'), - (0x1F680, 'V'), - (0x1F6C6, 'X'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D5, 'X'), + (0x1F6E0, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6F9, 'X'), (0x1F700, 'V'), (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D5, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F900, 'V'), + (0x1F90C, 'X'), + (0x1F910, 'V'), + (0x1F93F, 'X'), + (0x1F940, 'V'), + (0x1F94D, 'X'), + (0x1F950, 'V'), + (0x1F96C, 'X'), + (0x1F980, 'V'), + (0x1F998, 'X'), + (0x1F9C0, 'V'), + (0x1F9C1, 'X'), + (0x1F9D0, 'V'), + (0x1F9E7, 'X'), (0x20000, 'V'), (0x2A6D7, 'X'), (0x2A700, 'V'), (0x2B735, 'X'), (0x2B740, 'V'), (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), (0x2F800, 'M', u'丽'), (0x2F801, 'M', u'丸'), (0x2F802, 'M', u'乁'), @@ -7055,6 +7595,10 @@ def _seg_67(): (0x2F836, 'M', u'及'), (0x2F837, 'M', u'叟'), (0x2F838, 'M', u'𠭣'), + ] + +def _seg_73(): + return [ (0x2F839, 'M', u'叫'), (0x2F83A, 'M', u'叱'), (0x2F83B, 'M', u'吆'), @@ -7075,10 +7619,6 @@ def _seg_67(): (0x2F84B, 'M', u'圖'), (0x2F84C, 'M', u'嘆'), (0x2F84D, 'M', u'圗'), - ] - -def _seg_68(): - return [ (0x2F84E, 'M', u'噑'), (0x2F84F, 'M', u'噴'), (0x2F850, 'M', u'切'), @@ -7159,6 +7699,10 @@ def _seg_68(): (0x2F89E, 'M', u'志'), (0x2F89F, 'M', u'忹'), (0x2F8A0, 'M', u'悁'), + ] + +def _seg_74(): + return [ (0x2F8A1, 'M', u'㤺'), (0x2F8A2, 'M', u'㤜'), (0x2F8A3, 'M', u'悔'), @@ -7179,10 +7723,6 @@ def _seg_68(): (0x2F8B2, 'M', u'成'), (0x2F8B3, 'M', u'戛'), (0x2F8B4, 'M', u'扝'), - ] - -def _seg_69(): - return [ (0x2F8B5, 'M', u'抱'), (0x2F8B6, 'M', u'拔'), (0x2F8B7, 'M', u'捐'), @@ -7263,6 +7803,10 @@ def _seg_69(): (0x2F902, 'M', u'流'), (0x2F903, 'M', u'浩'), (0x2F904, 'M', u'浸'), + ] + +def _seg_75(): + return [ (0x2F905, 'M', u'涅'), (0x2F906, 'M', u'𣴞'), (0x2F907, 'M', u'洴'), @@ -7283,10 +7827,6 @@ def _seg_69(): (0x2F916, 'M', u'㶖'), (0x2F917, 'M', u'灊'), (0x2F918, 'M', u'災'), - ] - -def _seg_70(): - return [ (0x2F919, 'M', u'灷'), (0x2F91A, 'M', u'炭'), (0x2F91B, 'M', u'𠔥'), @@ -7367,6 +7907,10 @@ def _seg_70(): (0x2F969, 'M', u'糣'), (0x2F96A, 'M', u'紀'), (0x2F96B, 'M', u'𥾆'), + ] + +def _seg_76(): + return [ (0x2F96C, 'M', u'絣'), (0x2F96D, 'M', u'䌁'), (0x2F96E, 'M', u'緇'), @@ -7387,10 +7931,6 @@ def _seg_70(): (0x2F97D, 'M', u'聠'), (0x2F97E, 'M', u'𦖨'), (0x2F97F, 'M', u'聰'), - ] - -def _seg_71(): - return [ (0x2F980, 'M', u'𣍟'), (0x2F981, 'M', u'䏕'), (0x2F982, 'M', u'育'), @@ -7471,6 +8011,10 @@ def _seg_71(): (0x2F9CD, 'M', u'䚾'), (0x2F9CE, 'M', u'䛇'), (0x2F9CF, 'M', u'誠'), + ] + +def _seg_77(): + return [ (0x2F9D0, 'M', u'諭'), (0x2F9D1, 'M', u'變'), (0x2F9D2, 'M', u'豕'), @@ -7491,10 +8035,6 @@ def _seg_71(): (0x2F9E1, 'M', u'𨗭'), (0x2F9E2, 'M', u'邔'), (0x2F9E3, 'M', u'郱'), - ] - -def _seg_72(): - return [ (0x2F9E4, 'M', u'鄑'), (0x2F9E5, 'M', u'𨜮'), (0x2F9E6, 'M', u'鄛'), @@ -7631,4 +8171,9 @@ def _seg_72(): + _seg_70() + _seg_71() + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() ) diff --git a/pipenv/patched/notpip/_vendor/ipaddress.py 
b/pipenv/patched/notpip/_vendor/ipaddress.py index 8cfdd58acf..f2d0766842 100644 --- a/pipenv/patched/notpip/_vendor/ipaddress.py +++ b/pipenv/patched/notpip/_vendor/ipaddress.py @@ -14,7 +14,7 @@ import itertools import struct -__version__ = '1.0.19' +__version__ = '1.0.22' # Compatibility functions _compat_int_types = (int,) diff --git a/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py b/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py index 3f3a0a4e06..ed57821d14 100644 --- a/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py +++ b/pipenv/patched/notpip/_vendor/pkg_resources/__init__.py @@ -377,11 +377,7 @@ def get_build_platform(): XXX Currently this is the same as ``distutils.util.get_platform()``, but it needs some hacks for Linux and Mac OS X. """ - try: - # Python 2.7 or >=3.2 - from sysconfig import get_platform - except ImportError: - from distutils.util import get_platform + from sysconfig import get_platform plat = get_platform() if sys.platform == "darwin" and not plat.startswith('macosx-'): @@ -1518,12 +1514,10 @@ def _get(self, path): @classmethod def _register(cls): - loader_cls = getattr( - importlib_machinery, - 'SourceFileLoader', - type(None), - ) - register_loader_type(loader_cls, cls) + loader_names = 'SourceFileLoader', 'SourcelessFileLoader', + for name in loader_names: + loader_cls = getattr(importlib_machinery, name, type(None)) + register_loader_type(loader_cls, cls) DefaultProvider._register() @@ -2669,6 +2663,19 @@ def __getattr__(self, attr): raise AttributeError(attr) return getattr(self._provider, attr) + def __dir__(self): + return list( + set(super(Distribution, self).__dir__()) + | set( + attr for attr in self._provider.__dir__() + if not attr.startswith('_') + ) + ) + + if not hasattr(object, '__dir__'): + # python 2.7 not supported + del __dir__ + @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( diff --git a/pipenv/patched/notpip/_vendor/progress/__init__.py b/pipenv/patched/notpip/_vendor/progress/__init__.py index 09dfc1ebe6..a41f65dc59 100644 --- a/pipenv/patched/notpip/_vendor/progress/__init__.py +++ b/pipenv/patched/notpip/_vendor/progress/__init__.py @@ -21,7 +21,7 @@ from time import time -__version__ = '1.3' +__version__ = '1.4' class Infinite(object): diff --git a/pipenv/patched/notpip/_vendor/progress/bar.py b/pipenv/patched/notpip/_vendor/progress/bar.py index 5ee968f0ce..025e61c452 100644 --- a/pipenv/patched/notpip/_vendor/progress/bar.py +++ b/pipenv/patched/notpip/_vendor/progress/bar.py @@ -15,6 +15,9 @@ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import unicode_literals + +import sys + from . 
import Progress from .helpers import WritelnMixin @@ -61,7 +64,10 @@ class FillingCirclesBar(ChargingBar): class IncrementalBar(Bar): - phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█') + if sys.platform.startswith('win'): + phases = (u' ', u'▌', u'█') + else: + phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█') def update(self): nphases = len(self.phases) diff --git a/pipenv/patched/notpip/_vendor/progress/helpers.py b/pipenv/patched/notpip/_vendor/progress/helpers.py index 9ed90b2bc4..0cde44ec27 100644 --- a/pipenv/patched/notpip/_vendor/progress/helpers.py +++ b/pipenv/patched/notpip/_vendor/progress/helpers.py @@ -28,14 +28,14 @@ def __init__(self, message=None, **kwargs): if message: self.message = message - if self.file.isatty(): + if self.file and self.file.isatty(): if self.hide_cursor: print(HIDE_CURSOR, end='', file=self.file) print(self.message, end='', file=self.file) self.file.flush() def write(self, s): - if self.file.isatty(): + if self.file and self.file.isatty(): b = '\b' * self._width c = s.ljust(self._width) print(b + c, end='', file=self.file) @@ -43,7 +43,7 @@ def write(self, s): self.file.flush() def finish(self): - if self.file.isatty() and self.hide_cursor: + if self.file and self.file.isatty() and self.hide_cursor: print(SHOW_CURSOR, end='', file=self.file) @@ -55,21 +55,21 @@ def __init__(self, message=None, **kwargs): if message: self.message = message - if self.file.isatty() and self.hide_cursor: + if self.file and self.file.isatty() and self.hide_cursor: print(HIDE_CURSOR, end='', file=self.file) def clearln(self): - if self.file.isatty(): + if self.file and self.file.isatty(): print('\r\x1b[K', end='', file=self.file) def writeln(self, line): - if self.file.isatty(): + if self.file and self.file.isatty(): self.clearln() print(line, end='', file=self.file) self.file.flush() def finish(self): - if self.file.isatty(): + if self.file and self.file.isatty(): print(file=self.file) if self.hide_cursor: print(SHOW_CURSOR, end='', file=self.file) diff --git a/pipenv/patched/notpip/_vendor/pytoml/parser.py b/pipenv/patched/notpip/_vendor/pytoml/parser.py index 7fc3d34d5b..e03a03fbda 100644 --- a/pipenv/patched/notpip/_vendor/pytoml/parser.py +++ b/pipenv/patched/notpip/_vendor/pytoml/parser.py @@ -6,35 +6,35 @@ else: _chr = chr -def load(fin, translate=lambda t, x, v: v): - return loads(fin.read(), translate=translate, filename=getattr(fin, 'name', repr(fin))) +def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict): + return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin))) -def loads(s, filename='', translate=lambda t, x, v: v): +def loads(s, filename='', translate=lambda t, x, v: v, object_pairs_hook=dict): if isinstance(s, bytes): s = s.decode('utf-8') s = s.replace('\r\n', '\n') - root = {} - tables = {} + root = object_pairs_hook() + tables = object_pairs_hook() scope = root src = _Source(s, filename=filename) - ast = _p_toml(src) + ast = _p_toml(src, object_pairs_hook=object_pairs_hook) def error(msg): raise TomlError(msg, pos[0], pos[1], filename) - def process_value(v): + def process_value(v, object_pairs_hook): kind, text, value, pos = v if kind == 'str' and value.startswith('\n'): value = value[1:] if kind == 'array': if value and any(k != value[0][0] for k, t, v, p in value[1:]): error('array-type-mismatch') - value = [process_value(item) for item in value] + value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value] elif kind == 'table': - value 
= dict([(k, process_value(value[k])) for k in value]) + value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value]) return translate(kind, text, value) for kind, value, pos in ast: @@ -42,7 +42,7 @@ def process_value(v): k, v = value if k in scope: error('duplicate_keys. Key "{0}" was used more than once.'.format(k)) - scope[k] = process_value(v) + scope[k] = process_value(v, object_pairs_hook=object_pairs_hook) else: is_table_array = (kind == 'table_array') cur = tables @@ -50,19 +50,19 @@ def process_value(v): if isinstance(cur.get(name), list): d, cur = cur[name][-1] else: - d, cur = cur.setdefault(name, (None, {})) + d, cur = cur.setdefault(name, (None, object_pairs_hook())) - scope = {} + scope = object_pairs_hook() name = value[-1] if name not in cur: if is_table_array: - cur[name] = [(scope, {})] + cur[name] = [(scope, object_pairs_hook())] else: - cur[name] = (scope, {}) + cur[name] = (scope, object_pairs_hook()) elif isinstance(cur[name], list): if not is_table_array: error('table_type_mismatch') - cur[name].append((scope, {})) + cur[name].append((scope, object_pairs_hook())) else: if is_table_array: error('table_type_mismatch') @@ -73,7 +73,7 @@ def process_value(v): def merge_tables(scope, tables): if scope is None: - scope = {} + scope = object_pairs_hook() for k in tables: if k in scope: error('key_table_conflict') @@ -225,7 +225,7 @@ def _p_key(s): _basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') _litstr_re = re.compile(r"[^'\000-\037]*") _litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*") -def _p_value(s): +def _p_value(s, object_pairs_hook): pos = s.pos() if s.consume('true'): @@ -283,7 +283,7 @@ def _p_value(s): with s: while True: _p_ews(s) - items.append(_p_value(s)) + items.append(_p_value(s, object_pairs_hook=object_pairs_hook)) s.commit() _p_ews(s) s.expect(',') @@ -294,13 +294,13 @@ def _p_value(s): if s.consume('{'): _p_ws(s) - items = {} + items = object_pairs_hook() if not s.consume('}'): k = _p_key(s) _p_ws(s) s.expect('=') _p_ws(s) - items[k] = _p_value(s) + items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) _p_ws(s) while s.consume(','): _p_ws(s) @@ -308,14 +308,14 @@ def _p_value(s): _p_ws(s) s.expect('=') _p_ws(s) - items[k] = _p_value(s) + items[k] = _p_value(s, object_pairs_hook=object_pairs_hook) _p_ws(s) s.expect('}') return 'table', None, items, pos s.fail() -def _p_stmt(s): +def _p_stmt(s, object_pairs_hook): pos = s.pos() if s.consume( '['): is_array = s.consume('[') @@ -335,19 +335,19 @@ def _p_stmt(s): _p_ws(s) s.expect('=') _p_ws(s) - value = _p_value(s) + value = _p_value(s, object_pairs_hook=object_pairs_hook) return 'kv', (key, value), pos _stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') -def _p_toml(s): +def _p_toml(s, object_pairs_hook): stmts = [] _p_ews(s) with s: - stmts.append(_p_stmt(s)) + stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) while True: s.commit() s.expect_re(_stmtsep_re) - stmts.append(_p_stmt(s)) + stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook)) _p_ews(s) s.expect_eof() return stmts diff --git a/pipenv/patched/notpip/_vendor/requests/LICENSE b/pipenv/patched/notpip/_vendor/requests/LICENSE index db78ea69f4..2e68b82ecb 100644 --- a/pipenv/patched/notpip/_vendor/requests/LICENSE +++ b/pipenv/patched/notpip/_vendor/requests/LICENSE @@ -1,4 +1,4 @@ -Copyright 2017 Kenneth Reitz +Copyright 2018 Kenneth Reitz Licensed under the Apache License, Version 2.0 (the "License"); you may not use this 
file except in compliance with the License. diff --git a/pipenv/patched/notpip/_vendor/requests/__init__.py b/pipenv/patched/notpip/_vendor/requests/__init__.py index 9270af263d..8e7576c13f 100644 --- a/pipenv/patched/notpip/_vendor/requests/__init__.py +++ b/pipenv/patched/notpip/_vendor/requests/__init__.py @@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version): # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.22 + # urllib3 >= 1.21.1, <= 1.23 assert major == 1 assert minor >= 21 - assert minor <= 22 + assert minor <= 23 # Check chardet for compatibility. major, minor, patch = chardet_version.split('.')[:3] @@ -71,6 +71,17 @@ def check_compatibility(urllib3_version, chardet_version): assert patch >= 2 +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split('.'))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = 'Old version of cryptography ({0}) may cause slowdown.'.format(cryptography_version) + warnings.warn(warning, RequestsDependencyWarning) + # Check imported dependencies for compatibility. try: check_compatibility(urllib3.__version__, chardet.__version__) @@ -85,6 +96,10 @@ def check_compatibility(urllib3_version, chardet_version): try: from pipenv.patched.notpip._vendor.urllib3.contrib import pyopenssl pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + _check_cryptography(cryptography_version) except ImportError: pass diff --git a/pipenv/patched/notpip/_vendor/requests/__version__.py b/pipenv/patched/notpip/_vendor/requests/__version__.py index dc33eef651..ef61ec0f55 100644 --- a/pipenv/patched/notpip/_vendor/requests/__version__.py +++ b/pipenv/patched/notpip/_vendor/requests/__version__.py @@ -5,10 +5,10 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' 
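Note: the _check_cryptography helper added in the hunk above compares versions by mapping "x.y.z" to a list of ints and relying on Python's element-wise list comparison; when a segment is non-numeric (e.g. "2.0.dev1"), int() raises and the check is simply skipped. A minimal standalone sketch of the same idea (the function name here is illustrative, not part of the patch):

import warnings

def version_is_older(version, minimum):
    # "1.3.3" -> [1, 3, 3]; non-numeric parts (e.g. "2.0.dev1") abort the check.
    try:
        parts = list(map(int, version.split('.')))
    except ValueError:
        return False
    return parts < minimum

if version_is_older('1.3.3', [1, 3, 4]):
    warnings.warn('cryptography 1.3.3 may cause slowdown.')

The int conversion is the point: compared as strings, '1.10.0' would sort before '1.3.4', but as lists [1, 10, 0] > [1, 3, 4].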
__url__ = 'http://python-requests.org' -__version__ = '2.18.4' -__build__ = 0x021804 +__version__ = '2.19.1' +__build__ = 0x021901 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2017 Kenneth Reitz' +__copyright__ = 'Copyright 2018 Kenneth Reitz' __cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/pipenv/patched/notpip/_vendor/requests/adapters.py b/pipenv/patched/notpip/_vendor/requests/adapters.py index 58cea3a6b3..014c267523 100644 --- a/pipenv/patched/notpip/_vendor/requests/adapters.py +++ b/pipenv/patched/notpip/_vendor/requests/adapters.py @@ -13,6 +13,7 @@ from pipenv.patched.notpip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url from pipenv.patched.notpip._vendor.urllib3.response import HTTPResponse +from pipenv.patched.notpip._vendor.urllib3.util import parse_url from pipenv.patched.notpip._vendor.urllib3.util import Timeout as TimeoutSauce from pipenv.patched.notpip._vendor.urllib3.util.retry import Retry from pipenv.patched.notpip._vendor.urllib3.exceptions import ClosedPoolError @@ -28,13 +29,13 @@ from .models import Response from .compat import urlparse, basestring -from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, - prepend_scheme_if_needed, get_auth_from_url, urldefragauth, - select_proxy) +from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, + get_encoding_from_headers, prepend_scheme_if_needed, + get_auth_from_url, urldefragauth, select_proxy) from .structures import CaseInsensitiveDict from .cookies import extract_cookies_to_jar from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError, InvalidSchema) + ProxyError, RetryError, InvalidSchema, InvalidProxyURL) from .auth import _basic_auth_str try: @@ -219,7 +220,7 @@ def cert_verify(self, conn, url, verify, cert): cert_loc = verify if not cert_loc: - cert_loc = DEFAULT_CA_BUNDLE_PATH + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) if not cert_loc or not os.path.exists(cert_loc): raise IOError("Could not find a suitable TLS CA certificate bundle, " @@ -300,6 +301,10 @@ def get_connection(self, url, proxies=None): if proxy: proxy = prepend_scheme_if_needed(proxy, 'http') + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL("Please check proxy URL. It is malformed" + " and could be missing the host.") proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_url(url) else: @@ -406,7 +411,7 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) - self.add_headers(request) + self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) chunked = not (request.body is None or 'Content-Length' in request.headers) diff --git a/pipenv/patched/notpip/_vendor/requests/api.py b/pipenv/patched/notpip/_vendor/requests/api.py index bc2115c150..a2cc84d769 100644 --- a/pipenv/patched/notpip/_vendor/requests/api.py +++ b/pipenv/patched/notpip/_vendor/requests/api.py @@ -20,7 +20,7 @@ def request(method, url, **kwargs): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. 
- :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. diff --git a/pipenv/patched/notpip/_vendor/requests/auth.py b/pipenv/patched/notpip/_vendor/requests/auth.py index 1a182dffdd..4ae459474d 100644 --- a/pipenv/patched/notpip/_vendor/requests/auth.py +++ b/pipenv/patched/notpip/_vendor/requests/auth.py @@ -153,6 +153,18 @@ def sha_utf8(x): x = x.encode('utf-8') return hashlib.sha1(x).hexdigest() hash_utf8 = sha_utf8 + elif _algorithm == 'SHA-256': + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha256(x).hexdigest() + hash_utf8 = sha256_utf8 + elif _algorithm == 'SHA-512': + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha512(x).hexdigest() + hash_utf8 = sha512_utf8 KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) diff --git a/pipenv/patched/notpip/_vendor/requests/compat.py b/pipenv/patched/notpip/_vendor/requests/compat.py index 011972c64f..4fbd6231b8 100644 --- a/pipenv/patched/notpip/_vendor/requests/compat.py +++ b/pipenv/patched/notpip/_vendor/requests/compat.py @@ -47,6 +47,7 @@ import cookielib from Cookie import Morsel from StringIO import StringIO + from collections import Callable, Mapping, MutableMapping from pipenv.patched.notpip._vendor.urllib3.packages.ordered_dict import OrderedDict @@ -64,6 +65,7 @@ from http.cookies import Morsel from io import StringIO from collections import OrderedDict + from collections.abc import Callable, Mapping, MutableMapping builtin_str = str str = str diff --git a/pipenv/patched/notpip/_vendor/requests/cookies.py b/pipenv/patched/notpip/_vendor/requests/cookies.py index ab3c88b9bf..50883a84f3 100644 --- a/pipenv/patched/notpip/_vendor/requests/cookies.py +++ b/pipenv/patched/notpip/_vendor/requests/cookies.py @@ -12,10 +12,9 @@ import copy import time import calendar -import collections from ._internal_utils import to_native_string -from .compat import cookielib, urlparse, urlunparse, Morsel +from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping try: import threading @@ -169,7 +168,7 @@ class CookieConflictError(RuntimeError): """ -class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface. 
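Note: several hunks above and below perform the same migration. requests and urllib3 historically imported Callable/Mapping/MutableMapping from the top-level collections module, which emits a DeprecationWarning on Python 3.3+ and fails outright on 3.10+. requests routes the import through its compat module as shown above; the urllib3/_collections.py hunk further down uses the freestanding try/except form. A minimal sketch of the pattern, with a toy mapping (LowerDict is illustrative, not from the patch) showing why subclassing MutableMapping pays off:

try:
    from collections.abc import MutableMapping  # Python 3.3+
except ImportError:                              # Python 2
    from collections import MutableMapping

class LowerDict(MutableMapping):
    # Define the five abstract methods and the ABC mixes in
    # get(), update(), items(), setdefault(), etc. for free.
    def __init__(self, *args, **kwargs):
        self._store = {}
        self.update(*args, **kwargs)

    def __setitem__(self, key, value):
        self._store[key.lower()] = value

    def __getitem__(self, key):
        return self._store[key.lower()]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

d = LowerDict({'Content-Type': 'application/json'})
assert d['content-type'] == 'application/json'

This is the same shape RequestsCookieJar and CaseInsensitiveDict rely on, which is why those classes only need their import source changed.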
@@ -415,9 +414,14 @@ def __setstate__(self, state): def copy(self): """Return a copy of this RequestsCookieJar.""" new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) new_cj.update(self) return new_cj + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + def _copy_cookie_jar(jar): if jar is None: diff --git a/pipenv/patched/notpip/_vendor/requests/exceptions.py b/pipenv/patched/notpip/_vendor/requests/exceptions.py index ea644a8f43..78b573c791 100644 --- a/pipenv/patched/notpip/_vendor/requests/exceptions.py +++ b/pipenv/patched/notpip/_vendor/requests/exceptions.py @@ -85,6 +85,10 @@ class InvalidHeader(RequestException, ValueError): """The header value provided was somehow invalid.""" +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + class ChunkedEncodingError(RequestException): """The server declared chunked encoding but sent an invalid chunk.""" diff --git a/pipenv/patched/notpip/_vendor/requests/help.py b/pipenv/patched/notpip/_vendor/requests/help.py index abc097a7e1..eba69edbd3 100644 --- a/pipenv/patched/notpip/_vendor/requests/help.py +++ b/pipenv/patched/notpip/_vendor/requests/help.py @@ -13,7 +13,7 @@ from . import __version__ as requests_version try: - from .packages.urllib3.contrib import pyopenssl + from pipenv.patched.notpip._vendor.urllib3.contrib import pyopenssl except ImportError: pyopenssl = None OpenSSL = None diff --git a/pipenv/patched/notpip/_vendor/requests/models.py b/pipenv/patched/notpip/_vendor/requests/models.py index 157a278284..6708f09b84 100644 --- a/pipenv/patched/notpip/_vendor/requests/models.py +++ b/pipenv/patched/notpip/_vendor/requests/models.py @@ -7,7 +7,6 @@ This module contains the primary objects that power Requests. """ -import collections import datetime import sys @@ -37,6 +36,7 @@ stream_decode_response_unicode, to_key_val_list, parse_header_links, iter_slices, guess_json_utf, super_len, check_header_validity) from .compat import ( + Callable, Mapping, cookielib, urlunparse, urlsplit, urlencode, str, bytes, is_py2, chardet, builtin_str, basestring) from .compat import json as complexjson @@ -155,8 +155,12 @@ def _encode_files(files, data): if isinstance(fp, (str, bytes, bytearray)): fdata = fp - else: + elif hasattr(fp, 'read'): fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) @@ -174,10 +178,10 @@ def register_hook(self, event, hook): if event not in self.hooks: raise ValueError('Unsupported event specified, with event name "%s"' % (event)) - if isinstance(hook, collections.Callable): + if isinstance(hook, Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): - self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable)) + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) def deregister_hook(self, event, hook): """Deregister a previously registered hook. @@ -461,7 +465,7 @@ def prepare_body(self, data, files, json=None): is_stream = all([ hasattr(data, '__iter__'), - not isinstance(data, (basestring, list, tuple, collections.Mapping)) + not isinstance(data, (basestring, list, tuple, Mapping)) ]) try: @@ -686,11 +690,11 @@ def __iter__(self): @property def ok(self): - """Returns True if :attr:`status_code` is less than 400. + """Returns True if :attr:`status_code` is less than 400, False if not. 
This attribute checks if the status code of the response is between 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This + the status code is between 200 and 400, this will return True. This is **not** a check to see if the response code is ``200 OK``. """ try: @@ -820,7 +824,7 @@ def content(self): if self.status_code == 0 or self.raw is None: self._content = None else: - self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() + self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' self._content_consumed = True # don't need to release the connection; that's been handled by urllib3 diff --git a/pipenv/patched/notpip/_vendor/requests/sessions.py b/pipenv/patched/notpip/_vendor/requests/sessions.py index 6570e73349..ba135268ab 100644 --- a/pipenv/patched/notpip/_vendor/requests/sessions.py +++ b/pipenv/patched/notpip/_vendor/requests/sessions.py @@ -8,13 +8,12 @@ requests (cookies, auth, proxies). """ import os -import platform +import sys import time -from collections import Mapping from datetime import timedelta from .auth import _basic_auth_str -from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse +from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT @@ -38,8 +37,8 @@ from .models import REDIRECT_STATI # Preferred clock, based on which one is more accurate on a given system. -if platform.system() == 'Windows': - try: # Python 3.3+ +if sys.platform == 'win32': + try: # Python 3.4+ preferred_clock = time.perf_counter except AttributeError: # Earlier than Python 3. preferred_clock = time.clock @@ -123,6 +122,7 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, hist = [] # keep track of history url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment while url: prepared_request = req.copy() @@ -147,8 +147,12 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, parsed_rurl = urlparse(resp.url) url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) - # The scheme should be lower case... + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) parsed = urlparse(url) + if parsed.fragment == '' and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment url = parsed.geturl() # Facilitate relative 'location' headers, as allowed by RFC 7231. @@ -696,7 +700,7 @@ def get_adapter(self, url): """ for (prefix, adapter) in self.adapters.items(): - if url.lower().startswith(prefix): + if url.lower().startswith(prefix.lower()): return adapter # Nothing matches :-/ diff --git a/pipenv/patched/notpip/_vendor/requests/status_codes.py b/pipenv/patched/notpip/_vendor/requests/status_codes.py index dee89190c0..ff462c6c69 100644 --- a/pipenv/patched/notpip/_vendor/requests/status_codes.py +++ b/pipenv/patched/notpip/_vendor/requests/status_codes.py @@ -1,5 +1,22 @@ # -*- coding: utf-8 -*- +""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. 
+ +>>> requests.codes['temporary_redirect'] +307 +>>> requests.codes.teapot +418 +>>> requests.codes['\o/'] +200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + from .structures import LookupDict _codes = { @@ -84,8 +101,20 @@ codes = LookupDict(name='status_codes') -for code, titles in _codes.items(): - for title in titles: - setattr(codes, title, code) - if not title.startswith(('\\', '/')): - setattr(codes, title.upper(), code) +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(('\\', '/')): + setattr(codes, title.upper(), code) + + def doc(code): + names = ', '.join('``%s``' % n for n in _codes[code]) + return '* %d: %s' % (code, names) + + global __doc__ + __doc__ = (__doc__ + '\n' + + '\n'.join(doc(code) for code in sorted(_codes)) + if __doc__ is not None else None) + +_init() diff --git a/pipenv/patched/notpip/_vendor/requests/structures.py b/pipenv/patched/notpip/_vendor/requests/structures.py index 05d2b3f57b..da930e2852 100644 --- a/pipenv/patched/notpip/_vendor/requests/structures.py +++ b/pipenv/patched/notpip/_vendor/requests/structures.py @@ -7,16 +7,14 @@ Data structures that power Requests. """ -import collections +from .compat import OrderedDict, Mapping, MutableMapping -from .compat import OrderedDict - -class CaseInsensitiveDict(collections.MutableMapping): +class CaseInsensitiveDict(MutableMapping): """A case-insensitive ``dict``-like object. Implements all methods and operations of - ``collections.MutableMapping`` as well as dict's ``copy``. Also + ``MutableMapping`` as well as dict's ``copy``. Also provides ``lower_items``. All keys are expected to be strings. The structure remembers the @@ -71,7 +69,7 @@ def lower_items(self): ) def __eq__(self, other): - if isinstance(other, collections.Mapping): + if isinstance(other, Mapping): other = CaseInsensitiveDict(other) else: return NotImplemented diff --git a/pipenv/patched/notpip/_vendor/requests/utils.py b/pipenv/patched/notpip/_vendor/requests/utils.py index 5c47de9893..431f6be074 100644 --- a/pipenv/patched/notpip/_vendor/requests/utils.py +++ b/pipenv/patched/notpip/_vendor/requests/utils.py @@ -8,17 +8,17 @@ that are also useful for external consumption. """ -import cgi import codecs -import collections import contextlib import io import os -import platform import re import socket import struct +import sys +import tempfile import warnings +import zipfile from .__version__ import __version__ from . 
import certs @@ -28,7 +28,7 @@ from .compat import ( quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, proxy_bypass, urlunparse, basestring, integer_types, is_py3, - proxy_bypass_environment, getproxies_environment) + proxy_bypass_environment, getproxies_environment, Mapping) from .cookies import cookiejar_from_dict from .structures import CaseInsensitiveDict from .exceptions import ( @@ -39,19 +39,25 @@ DEFAULT_CA_BUNDLE_PATH = certs.where() -if platform.system() == 'Windows': +if sys.platform == 'win32': # provide a proxy_bypass version on Windows without DNS lookups def proxy_bypass_registry(host): - if is_py3: - import winreg - else: - import _winreg as winreg + try: + if is_py3: + import winreg + else: + import _winreg as winreg + except ImportError: + return False + try: internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') - proxyEnable = winreg.QueryValueEx(internetSettings, - 'ProxyEnable')[0] + # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it + proxyEnable = int(winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0]) + # ProxyOverride is almost always a string proxyOverride = winreg.QueryValueEx(internetSettings, 'ProxyOverride')[0] except OSError: @@ -216,6 +222,38 @@ def guess_filename(obj): return os.path.basename(name) +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. + """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + member = '/'.join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, *member.split('/')) + if not os.path.exists(extracted_path): + extracted_path = zip_file.extract(member, path=tmp) + + return extracted_path + + def from_key_val_list(value): """Take an object and test to see if it can be represented as a dictionary. 
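Note: extract_zipped_paths above handles the case where a distribution runs from inside a zip (an .egg, a zipapp, or a zipped vendor bundle), so DEFAULT_CA_BUNDLE_PATH points at a "path" that crosses into an archive and os.path.exists() rejects it. The helper peels components off the right until an existing prefix is found, verifies that prefix is a zip, and extracts the member to the temp directory. A hypothetical round-trip, assuming this patch is applied (the archive name is illustrative):

import os, tempfile, zipfile

from pipenv.patched.notpip._vendor.requests.utils import extract_zipped_paths

archive = os.path.join(tempfile.gettempdir(), 'bundle.zip')
with zipfile.ZipFile(archive, 'w') as zf:
    zf.writestr('certs/cacert.pem', '--- dummy PEM ---')

inside = os.path.join(archive, 'certs', 'cacert.pem')
assert not os.path.exists(inside)          # the path crosses into the archive

extracted = extract_zipped_paths(inside)   # e.g. /tmp/certs/cacert.pem
assert os.path.exists(extracted)           # a real file ssl can open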
Unless it can not be represented as such, return an @@ -262,7 +300,7 @@ def to_key_val_list(value): if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') - if isinstance(value, collections.Mapping): + if isinstance(value, Mapping): value = value.items() return list(value) @@ -407,6 +445,31 @@ def get_encodings_from_content(content): xml_re.findall(content)) +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(';') + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1:].strip(items_to_strip) + params_dict[key] = value + return content_type, params_dict + + def get_encoding_from_headers(headers): """Returns encodings from given HTTP Header Dict. @@ -419,7 +482,7 @@ def get_encoding_from_headers(headers): if not content_type: return None - content_type, params = cgi.parse_header(content_type) + content_type, params = _parse_content_type_header(content_type) if 'charset' in params: return params['charset'].strip("'\"") @@ -632,6 +695,8 @@ def should_bypass_proxies(url, no_proxy): :rtype: bool """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) # First check whether no_proxy is defined. If it is, check that the URL @@ -639,28 +704,31 @@ def should_bypass_proxies(url, no_proxy): no_proxy_arg = no_proxy if no_proxy is None: no_proxy = get_proxy('no_proxy') - netloc = urlparse(url).netloc + parsed = urlparse(url) if no_proxy: # We need to check whether we match here. We need to see if we match - # the end of the netloc, both with and without the port. + # the end of the hostname, both with and without the port. no_proxy = ( host for host in no_proxy.replace(' ', '').split(',') if host ) - ip = netloc.split(':')[0] - if is_ipv4_address(ip): + if is_ipv4_address(parsed.hostname): for proxy_ip in no_proxy: if is_valid_cidr(proxy_ip): - if address_in_network(ip, proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): return True - elif ip == proxy_ip: + elif parsed.hostname == proxy_ip: # If no_proxy ip was defined in plain IP notation instead of cidr notation & # matches the IP of the index return True else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += ':{0}'.format(parsed.port) + for host in no_proxy: - if netloc.endswith(host) or netloc.split(':')[0].endswith(host): + if parsed.hostname.endswith(host) or host_with_port.endswith(host): # The URL does match something in no_proxy, so we don't want # to apply the proxies on this URL. return True @@ -673,7 +741,7 @@ def should_bypass_proxies(url, no_proxy): # legitimate problems. with set_environ('no_proxy', no_proxy_arg): try: - bypass = proxy_bypass(netloc) + bypass = proxy_bypass(parsed.hostname) except (TypeError, socket.gaierror): bypass = False @@ -743,7 +811,7 @@ def default_headers(): def parse_header_links(value): - """Return a dict of parsed link headers proxies. + """Return a list of parsed link headers proxies. i.e. 
Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" @@ -754,6 +822,10 @@ def parse_header_links(value): replace_chars = ' \'"' + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(', *<', value): try: url, params = val.split(';', 1) diff --git a/pipenv/patched/notpip/_vendor/urllib3/__init__.py b/pipenv/patched/notpip/_vendor/urllib3/__init__.py index aaa6b1c6d9..4bd533b5b4 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/__init__.py +++ b/pipenv/patched/notpip/_vendor/urllib3/__init__.py @@ -32,7 +32,7 @@ def emit(self, record): __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.22' +__version__ = '1.23' __all__ = ( 'HTTPConnectionPool', diff --git a/pipenv/patched/notpip/_vendor/urllib3/_collections.py b/pipenv/patched/notpip/_vendor/urllib3/_collections.py index 5df2372c4e..6e36b84e59 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/_collections.py +++ b/pipenv/patched/notpip/_vendor/urllib3/_collections.py @@ -1,5 +1,8 @@ from __future__ import absolute_import -from collections import Mapping, MutableMapping +try: + from collections.abc import Mapping, MutableMapping +except ImportError: + from collections import Mapping, MutableMapping try: from threading import RLock except ImportError: # Platform-specific: No threads available @@ -15,6 +18,7 @@ def __exit__(self, exc_type, exc_value, traceback): from collections import OrderedDict except ImportError: from .packages.ordered_dict import OrderedDict +from .exceptions import InvalidHeader from .packages.six import iterkeys, itervalues, PY3 @@ -305,13 +309,22 @@ def from_httplib(cls, message): # Python 2 # python2.7 does not expose a proper API for exporting multiheaders # efficiently. This function re-reads raw lines from the message # object and extracts the multiheaders properly. + obs_fold_continued_leaders = (' ', '\t') headers = [] for line in message.headers: - if line.startswith((' ', '\t')): - key, value = headers[-1] - headers[-1] = (key, value + '\r\n' + line.rstrip()) - continue + if line.startswith(obs_fold_continued_leaders): + if not headers: + # We received a header line that starts with OWS as described + # in RFC-7230 S3.2.4. This indicates a multiline header, but + # there exists no previous header to which we can attach it. + raise InvalidHeader( + 'Header continuation with no previous header: %s' % line + ) + else: + key, value = headers[-1] + headers[-1] = (key, value + ' ' + line.strip()) + continue key, value = line.split(':', 1) headers.append((key, value.strip())) diff --git a/pipenv/patched/notpip/_vendor/urllib3/connection.py b/pipenv/patched/notpip/_vendor/urllib3/connection.py index c0d8329985..a03b573f01 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/connection.py +++ b/pipenv/patched/notpip/_vendor/urllib3/connection.py @@ -56,10 +56,11 @@ class ConnectionError(Exception): 'https': 443, } -# When updating RECENT_DATE, move it to -# within two years of the current date, and no -# earlier than 6 months ago. -RECENT_DATE = datetime.date(2016, 1, 1) +# When updating RECENT_DATE, move it to within two years of the current date, +# and not less than 6 months ago. +# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or +# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months) +RECENT_DATE = datetime.date(2017, 6, 30) class DummyConnection(object): @@ -124,6 +125,35 @@ def __init__(self, *args, **kw): # Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw) + @property + def host(self): + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip('.') + + @host.setter + def host(self, value): + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + def _new_conn(self): """ Establish a socket connection and set nodelay settings on it. @@ -138,7 +168,7 @@ def _new_conn(self): try: conn = connection.create_connection( - (self.host, self.port), self.timeout, **extra_kw) + (self._dns_host, self.port), self.timeout, **extra_kw) except SocketTimeout as e: raise ConnectTimeoutError( diff --git a/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py b/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py index ec9600f8fa..8fcb0bce79 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py +++ b/pipenv/patched/notpip/_vendor/urllib3/connectionpool.py @@ -40,13 +40,10 @@ from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout -from .util.url import get_host, Url +from .util.url import get_host, Url, NORMALIZABLE_SCHEMES +from .util.queue import LifoQueue -if six.PY2: - # Queue is imported for side effects on MS Windows - import Queue as _unused_module_Queue # noqa: F401 - xrange = six.moves.xrange log = logging.getLogger(__name__) @@ -62,13 +59,13 @@ class ConnectionPool(object): """ scheme = None - QueueCls = queue.LifoQueue + QueueCls = LifoQueue def __init__(self, host, port=None): if not host: raise LocationValueError("No host specified.") - self.host = _ipv6_host(host).lower() + self.host = _ipv6_host(host, self.scheme) self._proxy_host = host.lower() self.port = port @@ -204,8 +201,8 @@ def _new_conn(self): Return a fresh :class:`HTTPConnection`. """ self.num_connections += 1 - log.debug("Starting new HTTP connection (%d): %s", - self.num_connections, self.host) + log.debug("Starting new HTTP connection (%d): %s:%s", + self.num_connections, self.host, self.port or "80") conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, @@ -411,6 +408,8 @@ def close(self): """ Close all pooled connections and disable the pool. """ + if self.pool is None: + return # Disable access to the pool old_pool, self.pool = self.pool, None @@ -434,7 +433,7 @@ def is_same_host(self, url): # TODO: Add optional support for socket.gethostbyname checking. 
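Note: the host property added above splits a connection's hostname into two views: _dns_host keeps any trailing dot (a trailing dot marks the name as fully qualified, so the resolver won't append search-domain suffixes), while host strips it, since certificates are issued for the dot-less form and some servers reject a dotted Host. Condensed to its essentials (class name illustrative):

class _Connection(object):
    def __init__(self, host):
        self.host = host                  # goes through the setter below

    @property
    def host(self):
        # Dot-less view: certificate matching, Host headers.
        return self._dns_host.rstrip('.')

    @host.setter
    def host(self, value):
        # Raw view: handed to the resolver when opening the socket.
        self._dns_host = value

conn = _Connection('example.com.')
assert conn._dns_host == 'example.com.'   # FQDN anchor preserved for DNS
assert conn.host == 'example.com'         # matches the cert's CN/SAN

This is also why _new_conn() in the same hunk now dials (self._dns_host, self.port) instead of (self.host, self.port).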
scheme, host, port = get_host(url) - host = _ipv6_host(host).lower() + host = _ipv6_host(host, self.scheme) # Use explicit default port for comparison when none is given if self.port and not port: @@ -820,8 +819,8 @@ def _new_conn(self): Return a fresh :class:`httplib.HTTPSConnection`. """ self.num_connections += 1 - log.debug("Starting new HTTPS connection (%d): %s", - self.num_connections, self.host) + log.debug("Starting new HTTPS connection (%d): %s:%s", + self.num_connections, self.host, self.port or "443") if not self.ConnectionCls or self.ConnectionCls is DummyConnection: raise SSLError("Can't connect to HTTPS URL because the SSL " @@ -886,7 +885,7 @@ def connection_from_url(url, **kw): return HTTPConnectionPool(host, port=port, **kw) -def _ipv6_host(host): +def _ipv6_host(host, scheme): """ Process IPv6 address literals """ @@ -902,4 +901,6 @@ def _ipv6_host(host): # percent sign might be URIencoded, convert it back into ASCII if host.startswith('[') and host.endswith(']'): host = host.replace('%25', '%').strip('[]') + if scheme in NORMALIZABLE_SCHEMES: + host = host.lower() return host diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/low_level.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/low_level.py index 5e3494bce6..b13cd9e72c 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/low_level.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/_securetransport/low_level.py @@ -111,6 +111,9 @@ def _cert_array_from_pem(pem_bundle): Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain. """ + # Normalize the PEM bundle's line endings. + pem_bundle = pem_bundle.replace(b"\r\n", b"\n") + der_certs = [ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) @@ -183,8 +186,8 @@ def _temporary_keychain(): # some random bytes to password-protect the keychain we're creating, so we # ask for 40 random bytes. random_bytes = os.urandom(40) - filename = base64.b64encode(random_bytes[:8]).decode('utf-8') - password = base64.b64encode(random_bytes[8:]) # Must be valid UTF-8 + filename = base64.b16encode(random_bytes[:8]).decode('utf-8') + password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 tempdirectory = tempfile.mkdtemp() keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py index 504d5bf0d4..065863521b 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/appengine.py @@ -236,12 +236,21 @@ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): encodings.remove('chunked') urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) - return HTTPResponse( + original_response = HTTPResponse( # In order for decoding to work, we must present the content as # a file-like object. 
+ body=BytesIO(urlfetch_resp.content), + msg=urlfetch_resp.header_msg, + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + **response_kw + ) + + return HTTPResponse( body=BytesIO(urlfetch_resp.content), headers=urlfetch_resp.headers, status=urlfetch_resp.status_code, + original_response=original_response, **response_kw ) diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/pyopenssl.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/pyopenssl.py index 62bd3e131f..7787d4e415 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/pyopenssl.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/pyopenssl.py @@ -47,6 +47,12 @@ from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend from cryptography.hazmat.backends.openssl.x509 import _Certificate +try: + from cryptography.x509 import UnsupportedExtension +except ImportError: + # UnsupportedExtension is gone in cryptography >= 2.1.0 + class UnsupportedExtension(Exception): + pass from socket import timeout, error as SocketError from io import BytesIO @@ -199,7 +205,7 @@ def get_subj_alt_name(peer_cert): except x509.ExtensionNotFound: # No such extension, return the empty list. return [] - except (x509.DuplicateExtension, x509.UnsupportedExtension, + except (x509.DuplicateExtension, UnsupportedExtension, x509.UnsupportedGeneralNameType, UnicodeError) as e: # A problem has been found with the quality of the certificate. Assume # no SAN field is present. @@ -267,8 +273,7 @@ def recv(self, *args, **kwargs): else: raise except OpenSSL.SSL.WantReadError: - rd = util.wait_for_read(self.socket, self.socket.gettimeout()) - if not rd: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): raise timeout('The read operation timed out') else: return self.recv(*args, **kwargs) @@ -289,8 +294,7 @@ def recv_into(self, *args, **kwargs): else: raise except OpenSSL.SSL.WantReadError: - rd = util.wait_for_read(self.socket, self.socket.gettimeout()) - if not rd: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): raise timeout('The read operation timed out') else: return self.recv_into(*args, **kwargs) @@ -303,8 +307,7 @@ def _send_until_done(self, data): try: return self.connection.send(data) except OpenSSL.SSL.WantWriteError: - wr = util.wait_for_write(self.socket, self.socket.gettimeout()) - if not wr: + if not util.wait_for_write(self.socket, self.socket.gettimeout()): raise timeout() continue except OpenSSL.SSL.SysCallError as e: @@ -418,7 +421,7 @@ def load_verify_locations(self, cafile=None, capath=None, cadata=None): self._ctx.load_verify_locations(BytesIO(cadata)) def load_cert_chain(self, certfile, keyfile=None, password=None): - self._ctx.use_certificate_file(certfile) + self._ctx.use_certificate_chain_file(certfile) if password is not None: self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) self._ctx.use_privatekey_file(keyfile or certfile) @@ -440,8 +443,7 @@ def wrap_socket(self, sock, server_side=False, try: cnx.do_handshake() except OpenSSL.SSL.WantReadError: - rd = util.wait_for_read(sock, sock.gettimeout()) - if not rd: + if not util.wait_for_read(sock, sock.gettimeout()): raise timeout('select timed out') continue except OpenSSL.SSL.Error as e: diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py index 2cac70f7f7..77cb59ed71 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py +++ 
b/pipenv/patched/notpip/_vendor/urllib3/contrib/securetransport.py @@ -51,11 +51,6 @@ _fileobject = None from ..packages.backports.makefile import backport_makefile -try: - memoryview(b'') -except NameError: - raise ImportError("SecureTransport only works on Pythons with memoryview") - __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] # SNI always works @@ -88,7 +83,7 @@ SSL_WRITE_BLOCKSIZE = 16384 # This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to -# individual cipher suites. We need to do this becuase this is how +# individual cipher suites. We need to do this because this is how # SecureTransport wants them. CIPHER_SUITES = [ SecurityConst.TLS_AES_256_GCM_SHA384, @@ -195,21 +190,18 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): timeout = wrapped_socket.gettimeout() error = None read_count = 0 - buffer = (ctypes.c_char * requested_length).from_address(data_buffer) - buffer_view = memoryview(buffer) try: while read_count < requested_length: if timeout is None or timeout >= 0: - readables = util.wait_for_read([base_socket], timeout) - if not readables: + if not util.wait_for_read(base_socket, timeout): raise socket.error(errno.EAGAIN, 'timed out') - # We need to tell ctypes that we have a buffer that can be - # written to. Upsettingly, we do that like this: - chunk_size = base_socket.recv_into( - buffer_view[read_count:requested_length] + remaining = requested_length - read_count + buffer = (ctypes.c_char * remaining).from_address( + data_buffer + read_count ) + chunk_size = base_socket.recv_into(buffer, remaining) read_count += chunk_size if not chunk_size: if not read_count: @@ -219,7 +211,8 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): error = e.errno if error is not None and error != errno.EAGAIN: - if error == errno.ECONNRESET: + data_length_pointer[0] = read_count + if error == errno.ECONNRESET or error == errno.EPIPE: return SecurityConst.errSSLClosedAbort raise @@ -257,8 +250,7 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): try: while sent < bytes_to_write: if timeout is None or timeout >= 0: - writables = util.wait_for_write([base_socket], timeout) - if not writables: + if not util.wait_for_write(base_socket, timeout): raise socket.error(errno.EAGAIN, 'timed out') chunk_sent = base_socket.send(data) sent += chunk_sent @@ -270,11 +262,13 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): error = e.errno if error is not None and error != errno.EAGAIN: - if error == errno.ECONNRESET: + data_length_pointer[0] = sent + if error == errno.ECONNRESET or error == errno.EPIPE: return SecurityConst.errSSLClosedAbort raise data_length_pointer[0] = sent + if sent != bytes_to_write: return SecurityConst.errSSLWouldBlock @@ -399,7 +393,7 @@ def _custom_validate(self, verify, trust_bundle): if trust: CoreFoundation.CFRelease(trust) - if cert_array is None: + if cert_array is not None: CoreFoundation.CFRelease(cert_array) # Ok, now we can look at what the result was. 
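Note: the socks.py hunk just below teaches SOCKSProxyManager to pull credentials out of the proxy URL itself (socks5://user:pass@host:port) when none are passed explicitly. urllib3 does this with its own parse_url, whose .auth attribute is the raw "user:pass" string; a rough standalone equivalent using the Python 3 stdlib parser, offered only as a sketch:

from urllib.parse import urlparse

def auth_from_proxy_url(proxy_url, username=None, password=None):
    parsed = urlparse(proxy_url)
    # Only fall back to URL-embedded credentials if the caller gave none.
    if username is None and password is None and parsed.username is not None:
        username, password = parsed.username, parsed.password
    return username, password

assert auth_from_proxy_url('socks5://user:secret@127.0.0.1:1080') == ('user', 'secret')
assert auth_from_proxy_url('socks5://127.0.0.1:1080') == (None, None)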
diff --git a/pipenv/patched/notpip/_vendor/urllib3/contrib/socks.py b/pipenv/patched/notpip/_vendor/urllib3/contrib/socks.py index 39e92fde19..811e312ec8 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/contrib/socks.py +++ b/pipenv/patched/notpip/_vendor/urllib3/contrib/socks.py @@ -152,6 +152,10 @@ def __init__(self, proxy_url, username=None, password=None, num_pools=10, headers=None, **connection_pool_kw): parsed = parse_url(proxy_url) + if username is None and password is None and parsed.auth is not None: + split = parsed.auth.split(':') + if len(split) == 2: + username, password = split if parsed.scheme == 'socks5': socks_version = socks.PROXY_TYPE_SOCKS5 rdns = False diff --git a/pipenv/patched/notpip/_vendor/urllib3/exceptions.py b/pipenv/patched/notpip/_vendor/urllib3/exceptions.py index 6c4be58106..7bbaa9871f 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/exceptions.py +++ b/pipenv/patched/notpip/_vendor/urllib3/exceptions.py @@ -154,7 +154,7 @@ class ResponseError(HTTPError): class SecurityWarning(HTTPWarning): - "Warned when perfoming security reducing actions" + "Warned when performing security reducing actions" pass diff --git a/pipenv/patched/notpip/_vendor/urllib3/fields.py b/pipenv/patched/notpip/_vendor/urllib3/fields.py index 19b0ae0c88..37fe64a3e8 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/fields.py +++ b/pipenv/patched/notpip/_vendor/urllib3/fields.py @@ -121,7 +121,7 @@ def _render_parts(self, header_parts): 'Content-Disposition' fields. :param header_parts: - A sequence of (k, v) typles or a :class:`dict` of (k, v) to format + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format as `k1="v1"; k2="v2"; ...`. """ parts = [] diff --git a/pipenv/patched/notpip/_vendor/urllib3/filepost.py b/pipenv/patched/notpip/_vendor/urllib3/filepost.py index cd11cee464..78f1e19b0e 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/filepost.py +++ b/pipenv/patched/notpip/_vendor/urllib3/filepost.py @@ -1,7 +1,8 @@ from __future__ import absolute_import +import binascii import codecs +import os -from uuid import uuid4 from io import BytesIO from .packages import six @@ -15,7 +16,10 @@ def choose_boundary(): """ Our embarrassingly-simple replacement for mimetools.choose_boundary. """ - return uuid4().hex + boundary = binascii.hexlify(os.urandom(16)) + if six.PY3: + boundary = boundary.decode('ascii') + return boundary def iter_field_objects(fields): @@ -65,7 +69,7 @@ def encode_multipart_formdata(fields, boundary=None): :param boundary: If not specified, then a random boundary will be generated using - :func:`mimetools.choose_boundary`. + :func:`urllib3.filepost.choose_boundary`. """ body = BytesIO() if boundary is None: diff --git a/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py b/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py index 4ae91744db..506a3c9b87 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py +++ b/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py @@ -312,8 +312,9 @@ def urlopen(self, method, url, redirect=True, **kw): kw['assert_same_host'] = False kw['redirect'] = False + if 'headers' not in kw: - kw['headers'] = self.headers + kw['headers'] = self.headers.copy() if self.proxy is not None and u.scheme == "http": response = conn.urlopen(method, url, **kw) @@ -335,6 +336,14 @@ def urlopen(self, method, url, redirect=True, **kw): if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect) + # Strip headers marked as unsafe to forward to the redirected location. 
+ # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. + if (retries.remove_headers_on_redirect + and not conn.is_same_host(redirect_location)): + for header in retries.remove_headers_on_redirect: + kw['headers'].pop(header, None) + try: retries = retries.increment(method, url, response=response, _pool=conn) except MaxRetryError: @@ -358,7 +367,7 @@ class ProxyManager(PoolManager): The URL of the proxy to be used. :param proxy_headers: - A dictionary contaning headers that will be sent to the proxy. In case + A dictionary containing headers that will be sent to the proxy. In case of HTTP they are being sent with each request, while in the HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. diff --git a/pipenv/patched/notpip/_vendor/urllib3/request.py b/pipenv/patched/notpip/_vendor/urllib3/request.py index c0fddff042..1be3334113 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/request.py +++ b/pipenv/patched/notpip/_vendor/urllib3/request.py @@ -44,8 +44,8 @@ def __init__(self, headers=None): def urlopen(self, method, url, body=None, headers=None, encode_multipart=True, multipart_boundary=None, **kw): # Abstract - raise NotImplemented("Classes extending RequestMethods must implement " - "their own ``urlopen`` method.") + raise NotImplementedError("Classes extending RequestMethods must implement " + "their own ``urlopen`` method.") def request(self, method, url, fields=None, headers=None, **urlopen_kw): """ @@ -60,6 +60,8 @@ def request(self, method, url, fields=None, headers=None, **urlopen_kw): """ method = method.upper() + urlopen_kw['request_url'] = url + if method in self._encode_url_methods: return self.request_encode_url(method, url, fields=fields, headers=headers, @@ -117,7 +119,7 @@ def request_encode_body(self, method, url, fields=None, headers=None, } When uploading a file, providing a filename (the first parameter of the - tuple) is optional but recommended to best mimick behavior of browsers. + tuple) is optional but recommended to best mimic behavior of browsers. 
Note that if ``headers`` are supplied, the 'Content-Type' header will be overwritten because it depends on the dynamic random boundary string diff --git a/pipenv/patched/notpip/_vendor/urllib3/response.py b/pipenv/patched/notpip/_vendor/urllib3/response.py index d3e5a1e601..9873cb9423 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/response.py +++ b/pipenv/patched/notpip/_vendor/urllib3/response.py @@ -52,18 +52,42 @@ def decompress(self, data): self._data = None +class GzipDecoderState(object): + + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + class GzipDecoder(object): def __init__(self): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER def __getattr__(self, name): return getattr(self._obj, name) def decompress(self, data): - if not data: - return data - return self._obj.decompress(data) + ret = binary_type() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return ret + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return ret + raise + data = self._obj.unused_data + if not data: + return ret + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) def _get_decoder(mode): @@ -89,9 +113,8 @@ class is also compatible with the Python standard library's :mod:`io` If True, the response's body will be preloaded during construction. :param decode_content: - If True, attempts to decode specific content-encoding's based on headers - (like 'gzip' and 'deflate') will be skipped and raw data will be used - instead. + If True, will attempt to decode the body based on the + 'content-encoding' header. :param original_response: When this HTTPResponse wrapper is generated from an httplib.HTTPResponse @@ -112,8 +135,9 @@ class is also compatible with the Python standard library's :mod:`io` def __init__(self, body='', headers=None, status=0, version=0, reason=None, strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None, - retries=None, enforce_content_length=False, request_method=None): + original_response=None, pool=None, connection=None, msg=None, + retries=None, enforce_content_length=False, + request_method=None, request_url=None): if isinstance(headers, HTTPHeaderDict): self.headers = headers @@ -132,6 +156,8 @@ def __init__(self, body='', headers=None, status=0, version=0, reason=None, self._fp = None self._original_response = original_response self._fp_bytes_read = 0 + self.msg = msg + self._request_url = request_url if body and isinstance(body, (basestring, binary_type)): self._body = body @@ -191,6 +217,9 @@ def data(self): def connection(self): return self._connection + def isclosed(self): + return is_fp_closed(self._fp) + def tell(self): """ Obtain the number of bytes pulled over the wire so far. May differ from @@ -205,18 +234,18 @@ def _init_length(self, request_method): """ length = self.headers.get('content-length') - if length is not None and self.chunked: - # This Response will fail with an IncompleteRead if it can't be - # received as chunked. This method falls back to attempt reading - # the response before raising an exception. - log.warning("Received response with both Content-Length and " - "Transfer-Encoding set. 
This is expressly forbidden " - "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " - "attempting to process response as Transfer-Encoding: " - "chunked.") - return None - - elif length is not None: + if length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. This method falls back to attempt reading + # the response before raising an exception. + log.warning("Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked.") + return None + try: # RFC 7230 section 3.3.2 specifies multiple content lengths can # be sent in a single Content-Length header @@ -573,6 +602,11 @@ def read_chunked(self, amt=None, decode_content=None): Similar to :meth:`HTTPResponse.read`, but with an additional parameter: ``decode_content``. + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. @@ -588,12 +622,17 @@ def read_chunked(self, amt=None, decode_content=None): "Body should be httplib.HTTPResponse like. " "It should have have an fp attribute which returns raw chunks.") - # Don't bother reading the body of a HEAD request. - if self._original_response and is_response_to_head(self._original_response): - self._original_response.close() - return - with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: + return + while True: self._update_chunk_length() if self.chunk_left == 0: @@ -624,3 +663,14 @@ def read_chunked(self, amt=None, decode_content=None): # We read everything; close the "file". if self._original_response: self._original_response.close() + + def geturl(self): + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. + """ + if self.retries is not None and len(self.retries.history): + return self.retries.history[-1].redirect_location + else: + return self._request_url diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/connection.py b/pipenv/patched/notpip/_vendor/urllib3/util/connection.py index bf699cfd0e..5cf488f4b5 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/connection.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/connection.py @@ -1,7 +1,6 @@ from __future__ import absolute_import import socket -from .wait import wait_for_read -from .selectors import HAS_SELECT, SelectorError +from .wait import NoWayToWaitForSocketError, wait_for_read def is_connection_dropped(conn): # Platform-specific @@ -19,14 +18,11 @@ def is_connection_dropped(conn): # Platform-specific return False if sock is None: # Connection already closed (such as by httplib). 
return True - - if not HAS_SELECT: - return False - try: - return bool(wait_for_read(sock, timeout=0.0)) - except SelectorError: - return True + # Returns True if readable, which here means it's been dropped + return wait_for_read(sock, timeout=0.0) + except NoWayToWaitForSocketError: # Platform-specific: AppEngine + return False # This function is copied from socket.py in the Python 2.7 standard diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/queue.py b/pipenv/patched/notpip/_vendor/urllib3/util/queue.py new file mode 100644 index 0000000000..d3d379a199 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/urllib3/util/queue.py @@ -0,0 +1,21 @@ +import collections +from ..packages import six +from ..packages.six.moves import queue + +if six.PY2: + # Queue is imported for side effects on MS Windows. See issue #229. + import Queue as _unused_module_Queue # noqa: F401 + + +class LifoQueue(queue.Queue): + def _init(self, _): + self.queue = collections.deque() + + def _qsize(self, len=len): + return len(self.queue) + + def _put(self, item): + self.queue.append(item) + + def _get(self): + return self.queue.pop() diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/retry.py b/pipenv/patched/notpip/_vendor/urllib3/util/retry.py index c603cb4904..7ad3dc6608 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/retry.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/retry.py @@ -19,6 +19,7 @@ log = logging.getLogger(__name__) + # Data structure for representing the metadata of requests that result in a retry. RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", "status", "redirect_location"]) @@ -139,6 +140,10 @@ class Retry(object): Whether to respect Retry-After header on status codes defined as :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. + :param iterable remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. """ DEFAULT_METHOD_WHITELIST = frozenset([ @@ -146,13 +151,16 @@ class Retry(object): RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) + #: Maximum backoff time. 
BACKOFF_MAX = 120 def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, backoff_factor=0, raise_on_redirect=True, raise_on_status=True, - history=None, respect_retry_after_header=True): + history=None, respect_retry_after_header=True, + remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): self.total = total self.connect = connect @@ -171,6 +179,7 @@ def __init__(self, total=10, connect=None, read=None, redirect=None, status=None self.raise_on_status = raise_on_status self.history = history or tuple() self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = remove_headers_on_redirect def new(self, **kw): params = dict( @@ -182,6 +191,7 @@ def new(self, **kw): raise_on_redirect=self.raise_on_redirect, raise_on_status=self.raise_on_status, history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect ) params.update(kw) return type(self)(**params) diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/selectors.py b/pipenv/patched/notpip/_vendor/urllib3/util/selectors.py deleted file mode 100644 index d75cb266bc..0000000000 --- a/pipenv/patched/notpip/_vendor/urllib3/util/selectors.py +++ /dev/null @@ -1,581 +0,0 @@ -# Backport of selectors.py from Python 3.5+ to support Python < 3.4 -# Also has the behavior specified in PEP 475 which is to retry syscalls -# in the case of an EINTR error. This module is required because selectors34 -# does not follow this behavior and instead returns that no dile descriptor -# events have occurred rather than retry the syscall. The decision to drop -# support for select.devpoll is made to maintain 100% test coverage. - -import errno -import math -import select -import socket -import sys -import time -from collections import namedtuple, Mapping - -try: - monotonic = time.monotonic -except (AttributeError, ImportError): # Python 3.3< - monotonic = time.time - -EVENT_READ = (1 << 0) -EVENT_WRITE = (1 << 1) - -HAS_SELECT = True # Variable that shows whether the platform has a selector. -_SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None. -_DEFAULT_SELECTOR = None - - -class SelectorError(Exception): - def __init__(self, errcode): - super(SelectorError, self).__init__() - self.errno = errcode - - def __repr__(self): - return "<SelectorError errno={0}>".format(self.errno) - - def __str__(self): - return self.__repr__() - - -def _fileobj_to_fd(fileobj): - """ Return a file descriptor from a file object. If - given an integer will simply return that integer back. """ - if isinstance(fileobj, int): - fd = fileobj - else: - try: - fd = int(fileobj.fileno()) - except (AttributeError, TypeError, ValueError): - raise ValueError("Invalid file object: {0!r}".format(fileobj)) - if fd < 0: - raise ValueError("Invalid file descriptor: {0}".format(fd)) - return fd - - -# Determine which function to use to wrap system calls because Python 3.5+ -# already handles the case when system calls are interrupted. -if sys.version_info >= (3, 5): - def _syscall_wrapper(func, _, *args, **kwargs): - """ This is the short-circuit version of the below logic - because in Python 3.5+ all system calls automatically restart - and recalculate their timeouts.
""" - try: - return func(*args, **kwargs) - except (OSError, IOError, select.error) as e: - errcode = None - if hasattr(e, "errno"): - errcode = e.errno - raise SelectorError(errcode) -else: - def _syscall_wrapper(func, recalc_timeout, *args, **kwargs): - """ Wrapper function for syscalls that could fail due to EINTR. - All functions should be retried if there is time left in the timeout - in accordance with PEP 475. """ - timeout = kwargs.get("timeout", None) - if timeout is None: - expires = None - recalc_timeout = False - else: - timeout = float(timeout) - if timeout < 0.0: # Timeout less than 0 treated as no timeout. - expires = None - else: - expires = monotonic() + timeout - - args = list(args) - if recalc_timeout and "timeout" not in kwargs: - raise ValueError( - "Timeout must be in args or kwargs to be recalculated") - - result = _SYSCALL_SENTINEL - while result is _SYSCALL_SENTINEL: - try: - result = func(*args, **kwargs) - # OSError is thrown by select.select - # IOError is thrown by select.epoll.poll - # select.error is thrown by select.poll.poll - # Aren't we thankful for Python 3.x rework for exceptions? - except (OSError, IOError, select.error) as e: - # select.error wasn't a subclass of OSError in the past. - errcode = None - if hasattr(e, "errno"): - errcode = e.errno - elif hasattr(e, "args"): - errcode = e.args[0] - - # Also test for the Windows equivalent of EINTR. - is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and - errcode == errno.WSAEINTR)) - - if is_interrupt: - if expires is not None: - current_time = monotonic() - if current_time > expires: - raise OSError(errno=errno.ETIMEDOUT) - if recalc_timeout: - if "timeout" in kwargs: - kwargs["timeout"] = expires - current_time - continue - if errcode: - raise SelectorError(errcode) - else: - raise - return result - - -SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) - - -class _SelectorMapping(Mapping): - """ Mapping of file objects to selector keys """ - - def __init__(self, selector): - self._selector = selector - - def __len__(self): - return len(self._selector._fd_to_key) - - def __getitem__(self, fileobj): - try: - fd = self._selector._fileobj_lookup(fileobj) - return self._selector._fd_to_key[fd] - except KeyError: - raise KeyError("{0!r} is not registered.".format(fileobj)) - - def __iter__(self): - return iter(self._selector._fd_to_key) - - -class BaseSelector(object): - """ Abstract Selector class - - A selector supports registering file objects to be monitored - for specific I/O events. - - A file object is a file descriptor or any object with a - `fileno()` method. An arbitrary object can be attached to the - file object which can be used for example to store context info, - a callback, etc. - - A selector can use various implementations (select(), poll(), epoll(), - and kqueue()) depending on the platform. The 'DefaultSelector' class uses - the most efficient implementation for the current platform. - """ - def __init__(self): - # Maps file descriptors to keys. - self._fd_to_key = {} - - # Read-only mapping returned by get_map() - self._map = _SelectorMapping(self) - - def _fileobj_lookup(self, fileobj): - """ Return a file descriptor from a file object. - This wraps _fileobj_to_fd() to do an exhaustive - search in case the object is invalid but we still - have it in our map. Used by unregister() so we can - unregister an object that was previously registered - even if it is closed. 
It is also used by _SelectorMapping - """ - try: - return _fileobj_to_fd(fileobj) - except ValueError: - - # Search through all our mapped keys. - for key in self._fd_to_key.values(): - if key.fileobj is fileobj: - return key.fd - - # Raise ValueError after all. - raise - - def register(self, fileobj, events, data=None): - """ Register a file object for a set of events to monitor. """ - if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): - raise ValueError("Invalid events: {0!r}".format(events)) - - key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) - - if key.fd in self._fd_to_key: - raise KeyError("{0!r} (FD {1}) is already registered" - .format(fileobj, key.fd)) - - self._fd_to_key[key.fd] = key - return key - - def unregister(self, fileobj): - """ Unregister a file object from being monitored. """ - try: - key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - # Getting the fileno of a closed socket on Windows errors with EBADF. - except socket.error as e: # Platform-specific: Windows. - if e.errno != errno.EBADF: - raise - else: - for key in self._fd_to_key.values(): - if key.fileobj is fileobj: - self._fd_to_key.pop(key.fd) - break - else: - raise KeyError("{0!r} is not registered".format(fileobj)) - return key - - def modify(self, fileobj, events, data=None): - """ Change a registered file object monitored events and data. """ - # NOTE: Some subclasses optimize this operation even further. - try: - key = self._fd_to_key[self._fileobj_lookup(fileobj)] - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - if events != key.events: - self.unregister(fileobj) - key = self.register(fileobj, events, data) - - elif data != key.data: - # Use a shortcut to update the data. - key = key._replace(data=data) - self._fd_to_key[key.fd] = key - - return key - - def select(self, timeout=None): - """ Perform the actual selection until some monitored file objects - are ready or the timeout expires. """ - raise NotImplementedError() - - def close(self): - """ Close the selector. This must be called to ensure that all - underlying resources are freed. """ - self._fd_to_key.clear() - self._map = None - - def get_key(self, fileobj): - """ Return the key associated with a registered file object. """ - mapping = self.get_map() - if mapping is None: - raise RuntimeError("Selector is closed") - try: - return mapping[fileobj] - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - def get_map(self): - """ Return a mapping of file objects to selector keys """ - return self._map - - def _key_from_fd(self, fd): - """ Return the key associated to a given file descriptor - Return None if it is not found. """ - try: - return self._fd_to_key[fd] - except KeyError: - return None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - -# Almost all platforms have select.select() -if hasattr(select, "select"): - class SelectSelector(BaseSelector): - """ Select-based selector. 
""" - def __init__(self): - super(SelectSelector, self).__init__() - self._readers = set() - self._writers = set() - - def register(self, fileobj, events, data=None): - key = super(SelectSelector, self).register(fileobj, events, data) - if events & EVENT_READ: - self._readers.add(key.fd) - if events & EVENT_WRITE: - self._writers.add(key.fd) - return key - - def unregister(self, fileobj): - key = super(SelectSelector, self).unregister(fileobj) - self._readers.discard(key.fd) - self._writers.discard(key.fd) - return key - - def _select(self, r, w, timeout=None): - """ Wrapper for select.select because timeout is a positional arg """ - return select.select(r, w, [], timeout) - - def select(self, timeout=None): - # Selecting on empty lists on Windows errors out. - if not len(self._readers) and not len(self._writers): - return [] - - timeout = None if timeout is None else max(timeout, 0.0) - ready = [] - r, w, _ = _syscall_wrapper(self._select, True, self._readers, - self._writers, timeout) - r = set(r) - w = set(w) - for fd in r | w: - events = 0 - if fd in r: - events |= EVENT_READ - if fd in w: - events |= EVENT_WRITE - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - return ready - - -if hasattr(select, "poll"): - class PollSelector(BaseSelector): - """ Poll-based selector """ - def __init__(self): - super(PollSelector, self).__init__() - self._poll = select.poll() - - def register(self, fileobj, events, data=None): - key = super(PollSelector, self).register(fileobj, events, data) - event_mask = 0 - if events & EVENT_READ: - event_mask |= select.POLLIN - if events & EVENT_WRITE: - event_mask |= select.POLLOUT - self._poll.register(key.fd, event_mask) - return key - - def unregister(self, fileobj): - key = super(PollSelector, self).unregister(fileobj) - self._poll.unregister(key.fd) - return key - - def _wrap_poll(self, timeout=None): - """ Wrapper function for select.poll.poll() so that - _syscall_wrapper can work with only seconds. """ - if timeout is not None: - if timeout <= 0: - timeout = 0 - else: - # select.poll.poll() has a resolution of 1 millisecond, - # round away from zero to wait *at least* timeout seconds. - timeout = math.ceil(timeout * 1e3) - - result = self._poll.poll(timeout) - return result - - def select(self, timeout=None): - ready = [] - fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) - for fd, event_mask in fd_events: - events = 0 - if event_mask & ~select.POLLIN: - events |= EVENT_WRITE - if event_mask & ~select.POLLOUT: - events |= EVENT_READ - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - - return ready - - -if hasattr(select, "epoll"): - class EpollSelector(BaseSelector): - """ Epoll-based selector """ - def __init__(self): - super(EpollSelector, self).__init__() - self._epoll = select.epoll() - - def fileno(self): - return self._epoll.fileno() - - def register(self, fileobj, events, data=None): - key = super(EpollSelector, self).register(fileobj, events, data) - events_mask = 0 - if events & EVENT_READ: - events_mask |= select.EPOLLIN - if events & EVENT_WRITE: - events_mask |= select.EPOLLOUT - _syscall_wrapper(self._epoll.register, False, key.fd, events_mask) - return key - - def unregister(self, fileobj): - key = super(EpollSelector, self).unregister(fileobj) - try: - _syscall_wrapper(self._epoll.unregister, False, key.fd) - except SelectorError: - # This can occur when the fd was closed since registry. 
- pass - return key - - def select(self, timeout=None): - if timeout is not None: - if timeout <= 0: - timeout = 0.0 - else: - # select.epoll.poll() has a resolution of 1 millisecond - # but luckily takes seconds so we don't need a wrapper - # like PollSelector. Just for better rounding. - timeout = math.ceil(timeout * 1e3) * 1e-3 - timeout = float(timeout) - else: - timeout = -1.0 # epoll.poll() must have a float. - - # We always want at least 1 to ensure that select can be called - # with no file descriptors registered. Otherwise will fail. - max_events = max(len(self._fd_to_key), 1) - - ready = [] - fd_events = _syscall_wrapper(self._epoll.poll, True, - timeout=timeout, - maxevents=max_events) - for fd, event_mask in fd_events: - events = 0 - if event_mask & ~select.EPOLLIN: - events |= EVENT_WRITE - if event_mask & ~select.EPOLLOUT: - events |= EVENT_READ - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - return ready - - def close(self): - self._epoll.close() - super(EpollSelector, self).close() - - -if hasattr(select, "kqueue"): - class KqueueSelector(BaseSelector): - """ Kqueue / Kevent-based selector """ - def __init__(self): - super(KqueueSelector, self).__init__() - self._kqueue = select.kqueue() - - def fileno(self): - return self._kqueue.fileno() - - def register(self, fileobj, events, data=None): - key = super(KqueueSelector, self).register(fileobj, events, data) - if events & EVENT_READ: - kevent = select.kevent(key.fd, - select.KQ_FILTER_READ, - select.KQ_EV_ADD) - - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - - if events & EVENT_WRITE: - kevent = select.kevent(key.fd, - select.KQ_FILTER_WRITE, - select.KQ_EV_ADD) - - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - - return key - - def unregister(self, fileobj): - key = super(KqueueSelector, self).unregister(fileobj) - if key.events & EVENT_READ: - kevent = select.kevent(key.fd, - select.KQ_FILTER_READ, - select.KQ_EV_DELETE) - try: - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - except SelectorError: - pass - if key.events & EVENT_WRITE: - kevent = select.kevent(key.fd, - select.KQ_FILTER_WRITE, - select.KQ_EV_DELETE) - try: - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - except SelectorError: - pass - - return key - - def select(self, timeout=None): - if timeout is not None: - timeout = max(timeout, 0) - - max_events = len(self._fd_to_key) * 2 - ready_fds = {} - - kevent_list = _syscall_wrapper(self._kqueue.control, True, - None, max_events, timeout) - - for kevent in kevent_list: - fd = kevent.ident - event_mask = kevent.filter - events = 0 - if event_mask == select.KQ_FILTER_READ: - events |= EVENT_READ - if event_mask == select.KQ_FILTER_WRITE: - events |= EVENT_WRITE - - key = self._key_from_fd(fd) - if key: - if key.fd not in ready_fds: - ready_fds[key.fd] = (key, events & key.events) - else: - old_events = ready_fds[key.fd][1] - ready_fds[key.fd] = (key, (events | old_events) & key.events) - - return list(ready_fds.values()) - - def close(self): - self._kqueue.close() - super(KqueueSelector, self).close() - - -if not hasattr(select, 'select'): # Platform-specific: AppEngine - HAS_SELECT = False - - -def _can_allocate(struct): - """ Checks that select structs can be allocated by the underlying - operating system, not just advertised by the select module. We don't - check select() because we'll be hopeful that most platforms that - don't have it available will not advertise it. 
(ie: GAE) """ - try: - # select.poll() objects won't fail until used. - if struct == 'poll': - p = select.poll() - p.poll(0) - - # All others will fail on allocation. - else: - getattr(select, struct)().close() - return True - except (OSError, AttributeError) as e: - return False - - -# Choose the best implementation, roughly: -# kqueue == epoll > poll > select. Devpoll not supported. (See above) -# select() also can't accept a FD > FD_SETSIZE (usually around 1024) -def DefaultSelector(): - """ This function serves as a first call for DefaultSelector to - detect if the select module is being monkey-patched incorrectly - by eventlet, greenlet, and preserve proper behavior. """ - global _DEFAULT_SELECTOR - if _DEFAULT_SELECTOR is None: - if _can_allocate('kqueue'): - _DEFAULT_SELECTOR = KqueueSelector - elif _can_allocate('epoll'): - _DEFAULT_SELECTOR = EpollSelector - elif _can_allocate('poll'): - _DEFAULT_SELECTOR = PollSelector - elif hasattr(select, 'select'): - _DEFAULT_SELECTOR = SelectSelector - else: # Platform-specific: AppEngine - raise ValueError('Platform does not have a selector') - return _DEFAULT_SELECTOR() diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/ssl_.py b/pipenv/patched/notpip/_vendor/urllib3/util/ssl_.py index 0d0799b9fd..a0868c22c4 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/ssl_.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/ssl_.py @@ -2,11 +2,13 @@ import errno import warnings import hmac +import socket from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning +from ..packages import six SSLContext = None @@ -53,6 +55,27 @@ def _const_compare_digest_backport(a, b): OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_COMPRESSION = 0x20000 + +# Python 2.7 and earlier didn't have inet_pton on non-Linux +# so we fallback on inet_aton in those cases. This means that +# we can only detect IPv4 addresses in this case. +if hasattr(socket, 'inet_pton'): + inet_pton = socket.inet_pton +else: + # Maybe we can use ipaddress if the user has urllib3[secure]? + try: + from pipenv.patched.notpip._vendor import ipaddress + + def inet_pton(_, host): + if isinstance(host, six.binary_type): + host = host.decode('ascii') + return ipaddress.ip_address(host) + + except ImportError: # Platform-specific: Non-Linux + def inet_pton(_, host): + return socket.inet_aton(host) + + # A secure default. # Sources for more information on TLS ciphers: # @@ -183,7 +206,7 @@ def resolve_cert_reqs(candidate): the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_NONE`. If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbrevation. + :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. @@ -325,17 +348,49 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if certfile: context.load_cert_chain(certfile, keyfile) - if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI - return context.wrap_socket(sock, server_hostname=server_hostname) - - warnings.warn( - 'An HTTPS request has been made, but the SNI (Subject Name ' - 'Indication) extension to TLS is not available on this platform. ' - 'This may cause the server to present an incorrect TLS ' - 'certificate, which can cause validation failures. 
You can upgrade to ' - 'a newer version of Python to solve this. For more information, see ' - 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' - '#ssl-warnings', - SNIMissingWarning - ) + + # If we detect server_hostname is an IP address then the SNI + # extension should not be used according to RFC3546 Section 3.1 + # We shouldn't warn the user if SNI isn't available but we would + # not be using SNI anyways due to IP address for server_hostname. + if ((server_hostname is not None and not is_ipaddress(server_hostname)) + or IS_SECURETRANSPORT): + if HAS_SNI and server_hostname is not None: + return context.wrap_socket(sock, server_hostname=server_hostname) + + warnings.warn( + 'An HTTPS request has been made, but the SNI (Server Name ' + 'Indication) extension to TLS is not available on this platform. ' + 'This may cause the server to present an incorrect TLS ' + 'certificate, which can cause validation failures. You can upgrade to ' + 'a newer version of Python to solve this. For more information, see ' + 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' + '#ssl-warnings', + SNIMissingWarning + ) + return context.wrap_socket(sock) + + +def is_ipaddress(hostname): + """Detects whether the hostname given is an IP address. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if six.PY3 and isinstance(hostname, six.binary_type): + # IDN A-label bytes are ASCII compatible. + hostname = hostname.decode('ascii') + + families = [socket.AF_INET] + if hasattr(socket, 'AF_INET6'): + families.append(socket.AF_INET6) + + for af in families: + try: + inet_pton(af, hostname) + except (socket.error, ValueError, OSError): + pass + else: + return True + return False diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/wait.py b/pipenv/patched/notpip/_vendor/urllib3/util/wait.py index cb396e508c..fa686eff48 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/wait.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/wait.py @@ -1,40 +1,153 @@ -from .selectors import ( - HAS_SELECT, - DefaultSelector, - EVENT_READ, - EVENT_WRITE -) - - -def _wait_for_io_events(socks, events, timeout=None): - """ Waits for IO events to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be interacted with immediately. """ - if not HAS_SELECT: - raise ValueError('Platform does not have a selector') - if not isinstance(socks, list): - # Probably just a single socket. - if hasattr(socks, "fileno"): - socks = [socks] - # Otherwise it might be a non-list iterable. +import errno +from functools import partial +import select +import sys +try: + from time import monotonic +except ImportError: + from time import time as monotonic + +__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] + + +class NoWayToWaitForSocketError(Exception): + pass + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have a lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? 
On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). +# +# On Windows, there is no poll() (or at least Python doesn't provide a wrapper +# for it), but that's OK, because on Windows, select() doesn't have this +# strange calling convention; plain select() works fine. +# +# So: on Windows we use select(), and everywhere else we use poll(). We also +# fall back to select() in case poll() is somehow broken or missing. + +if sys.version_info >= (3, 5): + # Modern Python, that retries syscalls by default + def _retry_on_intr(fn, timeout): + return fn(timeout) +else: + # Old and broken Pythons. + def _retry_on_intr(fn, timeout): + if timeout is not None and timeout <= 0: + return fn(timeout) + + if timeout is None: + deadline = float("inf") else: - socks = list(socks) - with DefaultSelector() as selector: - for sock in socks: - selector.register(sock, events) - return [key[0].fileobj for key in - selector.select(timeout) if key[1] & events] - - -def wait_for_read(socks, timeout=None): - """ Waits for reading to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be read from immediately. """ - return _wait_for_io_events(socks, EVENT_READ, timeout) - - -def wait_for_write(socks, timeout=None): - """ Waits for writing to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be written to immediately. """ - return _wait_for_io_events(socks, EVENT_WRITE, timeout) + deadline = monotonic() + timeout + + while True: + try: + return fn(timeout) + # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 + except (OSError, select.error) as e: + # 'e.args[0]' incantation works for both OSError and select.error + if e.args[0] != errno.EINTR: + raise + else: + timeout = deadline - monotonic() + if timeout < 0: + timeout = 0 + if timeout == float("inf"): + timeout = None + continue + + +def select_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + rcheck = [] + wcheck = [] + if read: + rcheck.append(sock) + if write: + wcheck.append(sock) + # When doing a non-blocking connect, most systems signal success by + # marking the socket writable. Windows, though, signals success by marked + # it as "exceptional". We paper over the difference by checking the write + # sockets for both conditions. (The stdlib selectors module does the same + # thing.) 
+ fn = partial(select.select, rcheck, wcheck, wcheck) + rready, wready, xready = _retry_on_intr(fn, timeout) + return bool(rready or wready or xready) + + +def poll_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + mask = 0 + if read: + mask |= select.POLLIN + if write: + mask |= select.POLLOUT + poll_obj = select.poll() + poll_obj.register(sock, mask) + + # For some reason, poll() takes timeout in milliseconds + def do_poll(t): + if t is not None: + t *= 1000 + return poll_obj.poll(t) + + return bool(_retry_on_intr(do_poll, timeout)) + + +def null_wait_for_socket(*args, **kwargs): + raise NoWayToWaitForSocketError("no select-equivalent available") + + +def _have_working_poll(): + # Apparently some systems have a select.poll that fails as soon as you try + # to use it, either due to strange configuration or broken monkeypatching + # from libraries like eventlet/greenlet. + try: + poll_obj = select.poll() + poll_obj.poll(0) + except (AttributeError, OSError): + return False + else: + return True + + +def wait_for_socket(*args, **kwargs): + # We delay choosing which implementation to use until the first time we're + # called. We could do it at import time, but then we might make the wrong + # decision if someone goes wild with monkeypatching select.poll after + # we're imported. + global wait_for_socket + if _have_working_poll(): + wait_for_socket = poll_wait_for_socket + elif hasattr(select, "select"): + wait_for_socket = select_wait_for_socket + else: # Platform-specific: Appengine. + wait_for_socket = null_wait_for_socket + return wait_for_socket(*args, **kwargs) + + +def wait_for_read(sock, timeout=None): + """ Waits for reading to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock, timeout=None): + """ Waits for writing to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. 
+ """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/pipenv/patched/notpip/_vendor/vendor.txt b/pipenv/patched/notpip/_vendor/vendor.txt index 3994e709a1..b9854e9ad1 100644 --- a/pipenv/patched/notpip/_vendor/vendor.txt +++ b/pipenv/patched/notpip/_vendor/vendor.txt @@ -1,22 +1,22 @@ appdirs==1.4.3 distlib==0.2.7 -distro==1.2.0 +distro==1.3.0 html5lib==1.0.1 six==1.11.0 colorama==0.3.9 -CacheControl==0.12.4 +CacheControl==0.12.5 msgpack-python==0.5.6 lockfile==0.12.2 -progress==1.3 -ipaddress==1.0.19 # Only needed on 2.6 and 2.7 +progress==1.4 +ipaddress==1.0.22 # Only needed on 2.6 and 2.7 packaging==17.1 pyparsing==2.2.0 -pytoml==0.1.14 +pytoml==0.1.16 retrying==1.3.3 -requests==2.18.4 +requests==2.19.1 chardet==3.0.4 - idna==2.6 - urllib3==1.22 - certifi==2018.1.18 -setuptools==39.1.0 + idna==2.7 + urllib3==1.23 + certifi==2018.4.16 +setuptools==39.2.0 webencodings==0.5.1 From 9fce94bab9f6ce732bb1dafd52469f502765dd9e Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:32:14 -0400 Subject: [PATCH 12/35] Import rewrites Signed-off-by: Dan Ryan --- pipenv/patched/piptools/cache.py | 2 +- pipenv/patched/piptools/locations.py | 2 +- pipenv/patched/piptools/utils.py | 6 +++--- pipenv/vendor/click/_unicodefun.py | 2 +- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/models/utils.py | 2 +- pipenv/vendor/yaspin/LICENSE | 21 +++++++++++++++++++ 7 files changed, 29 insertions(+), 8 deletions(-) create mode 100644 pipenv/vendor/yaspin/LICENSE diff --git a/pipenv/patched/piptools/cache.py b/pipenv/patched/piptools/cache.py index 7595b964c2..610a4f37d1 100644 --- a/pipenv/patched/piptools/cache.py +++ b/pipenv/patched/piptools/cache.py @@ -6,7 +6,7 @@ import os import sys -from pip._vendor.packaging.requirements import Requirement +from pipenv.patched.notpip._vendor.packaging.requirements import Requirement from .exceptions import PipToolsError from .locations import CACHE_DIR diff --git a/pipenv/patched/piptools/locations.py b/pipenv/patched/piptools/locations.py index 4e6174c550..0d460f640c 100644 --- a/pipenv/patched/piptools/locations.py +++ b/pipenv/patched/piptools/locations.py @@ -4,7 +4,7 @@ from .click import secho from ._compat import user_cache_dir -# The user_cache_dir helper comes straight from pip itself +# The user_cache_dir helper comes straight from pipenv.patched.notpip itself CACHE_DIR = user_cache_dir('pip-tools') # NOTE diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py index 6225d7e21a..a7259937fc 100644 --- a/pipenv/patched/piptools/utils.py +++ b/pipenv/patched/piptools/utils.py @@ -12,9 +12,9 @@ from ._compat import InstallRequirement from .click import style -from pip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier -from pip._vendor.packaging.version import Version, InvalidVersion, parse as parse_version -from pip._vendor.packaging.markers import Marker, Op, Value, Variable +from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier +from pipenv.patched.notpip._vendor.packaging.version import Version, InvalidVersion, parse as parse_version +from pipenv.patched.notpip._vendor.packaging.markers import Marker, Op, Value, Variable UNSAFE_PACKAGES = {'setuptools', 'distribute', 'pip'} diff --git a/pipenv/vendor/click/_unicodefun.py b/pipenv/vendor/click/_unicodefun.py index ff58c78bb3..9e17a384ef 100644 --- a/pipenv/vendor/click/_unicodefun.py +++ b/pipenv/vendor/click/_unicodefun.py @@ -114,5 +114,5 @@ def 
_verify_python3_env(): raise RuntimeError('Click will abort further execution because Python 3 ' 'was configured to use ASCII as encoding for the ' - 'environment. Consult http://click.pocoo.org/python3/ ' + 'environment. Consult http://click.pocoo.org/python3/' 'for mitigation steps.' + extra) diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 0faea40b4e..01a56efa6e 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.1.7.dev0' +__version__ = '1.1.6' from .exceptions import RequirementError diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 6320236ee9..2c6ef7e00b 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -117,7 +117,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): - """Cleans VCS uris from pip format""" + """Cleans VCS uris from pipenv.patched.notpip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: diff --git a/pipenv/vendor/yaspin/LICENSE b/pipenv/vendor/yaspin/LICENSE new file mode 100644 index 0000000000..2458104e5a --- /dev/null +++ b/pipenv/vendor/yaspin/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Pavlo Dmytrenko + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
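The s/pip/pipenv.patched.notpip/ rewrites in this patch, including the ones that land inside comments and docstrings ("comes straight from pipenv.patched.notpip itself", "Cleans VCS uris from pipenv.patched.notpip format"), look like the output of a blanket textual pass over the vendored sources rather than hand edits. A rough sketch of such a pass, assuming a simple from/import prefix substitution; this is a guess at the mechanism, not the actual tasks/vendoring implementation:

    import re
    from pathlib import Path

    # Assumed root mapping; the real vendoring task may differ.
    OLD, NEW = "pip", "pipenv.patched.notpip"

    def rewrite_imports(path):
        source = Path(path).read_text()
        # Rewriting every "from pip..." / "import pip..." occurrence, not
        # just import statements, is exactly what also catches prose such
        # as "comes straight from pip itself" in the hunks above.
        source = re.sub(
            r"\b(from|import) {0}\b".format(re.escape(OLD)),
            r"\1 {0}".format(NEW),
            source,
        )
        Path(path).write_text(source)

Consistent with a rule of this shape, the comment "this is also what pip does" (left untouched in the requirementslib hunk) contains neither prefix, so it survives unmangled.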
From 4a5b63a0bb91d2674b8d0fb36c2df1827f114011 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 7 Sep 2018 02:32:29 -0400 Subject: [PATCH 13/35] Fix passa vendoring task Signed-off-by: Dan Ryan --- tasks/vendoring/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 1441807957..3c5d8d9987 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -629,6 +629,6 @@ def main(ctx, package=None): download_licenses(ctx, _vendor_dir) from .vendor_passa import vendor_passa log("Vendoring passa...") - vendor_passa() + vendor_passa(ctx) # update_safety(ctx) log('Revendoring complete') From c257891681c2a08d035d224789b666c01f577d08 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Oct 2018 06:47:14 -0400 Subject: [PATCH 14/35] Update vendored dependencies Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 31 +++++++++---------- tasks/vendoring/patches/patched/pip18.patch | 2 +- .../vendoring/patches/patched/piptools.patch | 5 ++- 3 files changed, 18 insertions(+), 20 deletions(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 5ba19048c0..b9f091e193 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -2,11 +2,11 @@ appdirs==1.4.3 backports.shutil_get_terminal_size==1.0.0 backports.weakref==1.0.post1 blindspin==2.0.1 -click==6.7 +click==7.0 click-completion==0.4.1 click-didyoumean==0.0.3 colorama==0.3.9 -delegator.py==0.1.0 +delegator.py==0.1.1 pexpect==4.6.0 ptyprocess==0.6.0 python-dotenv==0.9.1 @@ -21,29 +21,28 @@ pipdeptree==0.13.0 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 -pythonfinder==1.0.2 +pythonfinder==1.1.0 requests==2.19.1 chardet==3.0.4 idna==2.7 urllib3==1.23 certifi==2018.8.24 -requirementslib==1.1.6 - attrs==18.1.0 - distlib==0.2.7 - packaging==17.1 - pyparsing==2.2.0 - pytoml==0.1.18 - plette==0.1.1 - tomlkit==0.4.2 -shellingham==1.2.4 +requirementslib==1.1.7 + attrs==18.2.0 + distlib==0.2.8 + packaging==18.0 + pyparsing==2.2.2 + pytoml==0.1.19 + plette==0.2.2 + tomlkit==0.4.4 +shellingham==1.2.6 six==1.11.0 semver==2.8.1 shutilwhich==1.1.0 -toml==0.9.4 +toml==0.10.0 cached-property==1.4.3 -vistir==0.1.5 -pip-shims==0.1.2 - modutil==2.0.0 +vistir==0.1.6 +pip-shims==0.3.0 ptyprocess==0.6.0 enum34==1.1.6 yaspin==0.14.0 diff --git a/tasks/vendoring/patches/patched/pip18.patch b/tasks/vendoring/patches/patched/pip18.patch index 539825c2ee..60d06dcf6b 100644 --- a/tasks/vendoring/patches/patched/pip18.patch +++ b/tasks/vendoring/patches/patched/pip18.patch @@ -95,7 +95,7 @@ index 8c0ec82c..ad00ba04 100644 return files, urls - def _candidate_sort_key(self, candidate): -+ def _candidate_sort_key(self, candidate, ignore_compatibility=False): ++ def _candidate_sort_key(self, candidate, ignore_compatibility=True): """ Function used to generate link sort key for link tuples. The greater the return value, the more preferred it is. 
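For context on the pip18.patch tweak just above: giving the vendored pip's _candidate_sort_key an ignore_compatibility flag that defaults to True means candidate wheels whose tags do not match the running interpreter are still ranked instead of being thrown out, which is what lets pipenv resolve dependencies for a Python other than the one it runs under. A loose model of what the flag toggles; the names and key shape here are simplified illustrations, not pip's actual code:

    def candidate_sort_key(candidate, supported_tags, ignore_compatibility=True):
        # Simplified: wheels outrank sdists, and a wheel ranks by how well
        # its tags match the supported-tag list (lower index = better match).
        if not candidate.filename.endswith(".whl"):
            return (0, candidate.version, 0)
        matches = [
            supported_tags.index(tag)
            for tag in candidate.tags
            if tag in supported_tags
        ]
        if matches:
            priority = -min(matches)
        elif ignore_compatibility:
            priority = -len(supported_tags)  # keep it, ranked below any match
        else:
            raise ValueError("wheel is incompatible with this interpreter")
        return (1, candidate.version, priority)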
diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 1f1ab655be..90c420b989 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -300,8 +300,7 @@ index bf69803..eb20560 100644 'download_dir': download_dir, 'wheel_download_dir': self._wheel_download_dir, 'progress_bar': 'off', -- 'build_isolation': False -+ 'build_isolation': True + 'build_isolation': False } resolver_kwargs = { 'finder': self.finder, @@ -317,7 +316,7 @@ index bf69803..eb20560 100644 'wheel_cache': wheel_cache, - 'use_user_site': False + 'use_user_site': False, -+ 'ignore_compatibility': True ++ 'ignore_compatibility': False } resolver = None preparer = None From ad2d6c93636d02e0de022be1451013e9aa532788 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Oct 2018 14:28:02 -0400 Subject: [PATCH 15/35] Update all vendored dependencies - Verified all license changes - Brings us current with the latest releases - Didn't identify any breaking changes in click Signed-off-by: Dan Ryan --- Pipfile.lock | 299 ++++-- pipenv/patched/piptools/repositories/pypi.py | 8 +- pipenv/vendor/attr/__init__.py | 16 +- pipenv/vendor/attr/__init__.pyi | 252 +++++ pipenv/vendor/attr/_compat.py | 45 +- pipenv/vendor/attr/_funcs.py | 150 ++- pipenv/vendor/attr/_make.py | 897 ++++++++++++----- pipenv/vendor/attr/converters.py | 56 +- pipenv/vendor/attr/converters.pyi | 12 + pipenv/vendor/attr/exceptions.py | 9 + pipenv/vendor/attr/exceptions.pyi | 7 + pipenv/vendor/attr/filters.pyi | 5 + pipenv/vendor/attr/py.typed | 0 pipenv/vendor/attr/validators.py | 62 +- pipenv/vendor/attr/validators.pyi | 14 + pipenv/vendor/click/LICENSE | 38 - pipenv/vendor/click/LICENSE.rst | 39 + pipenv/vendor/click/__init__.py | 27 +- pipenv/vendor/click/_bashcomplete.py | 268 ++++- pipenv/vendor/click/_compat.py | 97 +- pipenv/vendor/click/_termui_impl.py | 300 +++--- pipenv/vendor/click/_unicodefun.py | 23 +- pipenv/vendor/click/_winconsole.py | 42 +- pipenv/vendor/click/core.py | 214 +++- pipenv/vendor/click/decorators.py | 25 +- pipenv/vendor/click/exceptions.py | 56 +- pipenv/vendor/click/globals.py | 2 +- pipenv/vendor/click/parser.py | 33 +- pipenv/vendor/click/termui.py | 117 ++- pipenv/vendor/click/testing.py | 102 +- pipenv/vendor/click/types.py | 188 +++- pipenv/vendor/click/utils.py | 29 +- pipenv/vendor/delegator.py | 104 +- pipenv/vendor/distlib/__init__.py | 2 +- pipenv/vendor/distlib/database.py | 7 +- pipenv/vendor/distlib/locators.py | 11 +- pipenv/vendor/distlib/metadata.py | 9 +- pipenv/vendor/distlib/scripts.py | 6 +- pipenv/vendor/distlib/util.py | 15 +- pipenv/vendor/distlib/wheel.py | 8 +- pipenv/vendor/modutil.py | 145 --- pipenv/vendor/packaging/__about__.py | 4 +- pipenv/vendor/packaging/requirements.py | 8 +- pipenv/vendor/packaging/specifiers.py | 2 +- pipenv/vendor/passa/LICENSE | 2 +- pipenv/vendor/passa/actions/__init__.py | 0 pipenv/vendor/passa/actions/add.py | 57 ++ pipenv/vendor/passa/actions/clean.py | 16 + pipenv/vendor/passa/actions/freeze.py | 93 ++ pipenv/vendor/passa/actions/init.py | 59 ++ pipenv/vendor/passa/actions/install.py | 32 + pipenv/vendor/passa/actions/lock.py | 17 + pipenv/vendor/passa/actions/remove.py | 38 + pipenv/vendor/passa/actions/sync.py | 20 + pipenv/vendor/passa/actions/upgrade.py | 52 + pipenv/vendor/passa/cli/__init__.py | 2 +- pipenv/vendor/passa/cli/_base.py | 68 +- pipenv/vendor/passa/cli/add.py | 95 +- pipenv/vendor/passa/cli/clean.py | 29 +- pipenv/vendor/passa/cli/freeze.py | 130 
+-- pipenv/vendor/passa/cli/init.py | 32 + pipenv/vendor/passa/cli/install.py | 57 +- pipenv/vendor/passa/cli/lock.py | 21 +- pipenv/vendor/passa/cli/options.py | 153 +++ pipenv/vendor/passa/cli/remove.py | 67 +- pipenv/vendor/passa/cli/sync.py | 39 +- pipenv/vendor/passa/cli/upgrade.py | 84 +- pipenv/vendor/passa/internals/_pip.py | 104 +- pipenv/vendor/passa/internals/candidates.py | 23 +- pipenv/vendor/passa/internals/dependencies.py | 49 +- pipenv/vendor/passa/models/__init__.py | 0 .../passa/{internals => models}/caches.py | 4 +- .../passa/{internals => models}/lockers.py | 40 +- .../passa/{internals => models}/metadata.py | 4 +- .../passa/{internals => models}/projects.py | 10 +- .../passa/{internals => models}/providers.py | 24 +- .../{internals => models}/synchronizers.py | 26 +- pipenv/vendor/pip_shims/__init__.py | 93 +- pipenv/vendor/pip_shims/shims.py | 457 ++++++--- pipenv/vendor/plette/__init__.py | 2 +- pipenv/vendor/plette/lockfiles.py | 26 +- pipenv/vendor/plette/models/base.py | 29 +- pipenv/vendor/pyparsing.py | 87 +- pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/cli.py | 18 +- pipenv/vendor/pythonfinder/models/__init__.py | 19 - pipenv/vendor/pythonfinder/models/python.py | 2 - pipenv/vendor/pythonfinder/models/windows.py | 5 +- pipenv/vendor/pythonfinder/utils.py | 14 +- pipenv/vendor/pytoml/parser.py | 4 +- pipenv/vendor/requirementslib/__init__.py | 2 +- .../requirementslib/models/dependencies.py | 7 +- .../requirementslib/models/requirements.py | 7 +- pipenv/vendor/requirementslib/models/utils.py | 2 - pipenv/vendor/requirementslib/models/vcs.py | 2 +- pipenv/vendor/requirementslib/utils.py | 39 +- pipenv/vendor/shellingham/__init__.py | 2 +- pipenv/vendor/shellingham/posix.py | 2 +- pipenv/vendor/shellingham/posix/_default.py | 27 - pipenv/vendor/shellingham/posix/_proc.py | 34 +- pipenv/vendor/shellingham/posix/_ps.py | 4 +- pipenv/vendor/shellingham/posix/linux.py | 35 - pipenv/vendor/shellingham/posix/proc.py | 14 +- pipenv/vendor/toml.py | 261 +++-- pipenv/vendor/toml/LICENSE | 26 + pipenv/vendor/toml/__init__.py | 21 + pipenv/vendor/toml/decoder.py | 945 ++++++++++++++++++ pipenv/vendor/toml/encoder.py | 250 +++++ pipenv/vendor/toml/ordered.py | 15 + pipenv/vendor/toml/tz.py | 21 + pipenv/vendor/tomlkit/__init__.py | 2 +- pipenv/vendor/tomlkit/_compat.py | 13 - pipenv/vendor/tomlkit/_utils.py | 33 + pipenv/vendor/tomlkit/container.py | 11 +- pipenv/vendor/tomlkit/items.py | 53 +- pipenv/vendor/tomlkit/parser.py | 33 +- pipenv/vendor/vistir/__init__.py | 2 +- pipenv/vendor/vistir/misc.py | 130 ++- tasks/vendoring/__init__.py | 6 +- .../vendoring/patches/patched/piptools.patch | 4 +- 120 files changed, 5761 insertions(+), 2200 deletions(-) create mode 100644 pipenv/vendor/attr/__init__.pyi create mode 100644 pipenv/vendor/attr/converters.pyi create mode 100644 pipenv/vendor/attr/exceptions.pyi create mode 100644 pipenv/vendor/attr/filters.pyi create mode 100644 pipenv/vendor/attr/py.typed create mode 100644 pipenv/vendor/attr/validators.pyi delete mode 100644 pipenv/vendor/click/LICENSE create mode 100644 pipenv/vendor/click/LICENSE.rst delete mode 100644 pipenv/vendor/modutil.py create mode 100644 pipenv/vendor/passa/actions/__init__.py create mode 100644 pipenv/vendor/passa/actions/add.py create mode 100644 pipenv/vendor/passa/actions/clean.py create mode 100644 pipenv/vendor/passa/actions/freeze.py create mode 100644 pipenv/vendor/passa/actions/init.py create mode 100644 pipenv/vendor/passa/actions/install.py create mode 100644 
pipenv/vendor/passa/actions/lock.py create mode 100644 pipenv/vendor/passa/actions/remove.py create mode 100644 pipenv/vendor/passa/actions/sync.py create mode 100644 pipenv/vendor/passa/actions/upgrade.py create mode 100644 pipenv/vendor/passa/cli/init.py create mode 100644 pipenv/vendor/passa/cli/options.py create mode 100644 pipenv/vendor/passa/models/__init__.py rename pipenv/vendor/passa/{internals => models}/caches.py (98%) rename pipenv/vendor/passa/{internals => models}/lockers.py (86%) rename pipenv/vendor/passa/{internals => models}/metadata.py (97%) rename pipenv/vendor/passa/{internals => models}/projects.py (96%) rename pipenv/vendor/passa/{internals => models}/providers.py (92%) rename pipenv/vendor/passa/{internals => models}/synchronizers.py (91%) delete mode 100644 pipenv/vendor/shellingham/posix/_default.py delete mode 100644 pipenv/vendor/shellingham/posix/linux.py create mode 100644 pipenv/vendor/toml/LICENSE create mode 100644 pipenv/vendor/toml/__init__.py create mode 100644 pipenv/vendor/toml/decoder.py create mode 100644 pipenv/vendor/toml/encoder.py create mode 100644 pipenv/vendor/toml/ordered.py create mode 100644 pipenv/vendor/toml/tz.py diff --git a/Pipfile.lock b/Pipfile.lock index a5f12e14be..3990a45167 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -17,17 +17,16 @@ "develop": { "alabaster": { "hashes": [ - "sha256:674bb3bab080f598371f4443c5008cbfeb1a5e622dd312395d2d82af2c54c456", - "sha256:b63b1f4dc77c074d386752ec4a8a7517600f6c0db8cd42980cae17ab7b3275d7" + "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", + "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" ], - "version": "==0.7.11" + "version": "==0.7.12" }, "apipkg": { "hashes": [ "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" ], - "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*'", "version": "==1.5" }, "appdirs": { @@ -54,17 +53,17 @@ }, "atomicwrites": { "hashes": [ - "sha256:240831ea22da9ab882b551b31d4225591e5e447a68c5e188db5b89ca1d487585", - "sha256:a24da68318b08ac9c9c45029f4a10371ab5b20e4226738e150e6e7c571630ae6" + "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", + "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" ], - "version": "==1.1.5" + "version": "==1.2.1" }, "attrs": { "hashes": [ - "sha256:4b90b09eeeb9b88c35bc642cbac057e45a5fd85367b985bd2809c62b7b939265", - "sha256:e0d0eb91441a3b53dab4d9b743eafc1ac44476296a2053b6ca3af0b139faf87b" + "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", + "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb" ], - "version": "==18.1.0" + "version": "==18.2.0" }, "babel": { "hashes": [ @@ -75,12 +74,19 @@ }, "black": { "hashes": [ - "sha256:22158b89c1a6b4eb333a1e65e791a3f8b998cf3b11ae094adb2570f31f769a44", - "sha256:4b475bbd528acce094c503a3d2dbc2d05a4075f6d0ef7d9e7514518e14cc5191" + "sha256:817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739", + "sha256:e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5" ], "index": "pypi", "markers": "python_version >= '3.6'", - "version": "==18.6b4" + "version": "==18.9b0" + }, + "bleach": { + "hashes": [ + "sha256:9c471c0dd9c820f6bf4ee5ca3e348ceccefbc1475d9a40c397ed5d04e0b42c54", + "sha256:b407b2612b37e6cdc6704f84cec18c1f140b78e6c625652a844e89d6b9855f6b" + 
], + "version": "==3.0.0" }, "cerberus": { "hashes": [ @@ -90,10 +96,47 @@ }, "certifi": { "hashes": [ - "sha256:4c1d68a1408dd090d2f3a869aa94c3947cc1d967821d1ed303208c9f41f0f2f4", - "sha256:b6e8b28b2b7e771a41ecdd12d4d43262ecab52adebbafa42c77d6b57fb6ad3a4" - ], - "version": "==2018.8.13" + "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638", + "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a" + ], + "version": "==2018.8.24" + }, + "cffi": { + "hashes": [ + "sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743", + "sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef", + "sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50", + "sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f", + "sha256:3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30", + "sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93", + "sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257", + "sha256:495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b", + "sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3", + "sha256:57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e", + "sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc", + "sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04", + "sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6", + "sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359", + "sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596", + "sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b", + "sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd", + "sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95", + "sha256:a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5", + "sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e", + "sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6", + "sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca", + "sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31", + "sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1", + "sha256:ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2", + "sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085", + "sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801", + "sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4", + "sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184", + "sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917", + "sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f", + "sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb" + ], + "version": "==1.11.5" }, "chardet": { "hashes": [ @@ -104,11 +147,44 @@ }, "click": { "hashes": [ - "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", - "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" + "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", + "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" ], "index": "pypi", - "version": "==6.7" + "version": "==7.0" + }, + "cmarkgfm": { + "hashes": [ + "sha256:0186dccca79483e3405217993b83b914ba4559fe9a8396efc4eea56561b74061", + 
"sha256:1a625afc6f62da428df96ec325dc30866cc5781520cbd904ff4ec44cf018171c", + "sha256:207b7673ff4e177374c572feeae0e4ef33be620ec9171c08fd22e2b796e03e3d", + "sha256:275905bb371a99285c74931700db3f0c078e7603bed383e8cf1a09f3ee05a3de", + "sha256:50098f1c4950722521f0671e54139e0edc1837d63c990cf0f3d2c49607bb51a2", + "sha256:50ed116d0b60a07df0dc7b180c28569064b9d37d1578d4c9021cff04d725cb63", + "sha256:61a72def110eed903cd1848245897bcb80d295cd9d13944d4f9f30cba5b76655", + "sha256:64186fb75d973a06df0e6ea12879533b71f6e7ba1ab01ffee7fc3e7534758889", + "sha256:665303d34d7f14f10d7b0651082f25ebf7107f29ef3d699490cac16cdc0fc8ce", + "sha256:70b18f843aec58e4e64aadce48a897fe7c50426718b7753aaee399e72df64190", + "sha256:761ee7b04d1caee2931344ac6bfebf37102ffb203b136b676b0a71a3f0ea3c87", + "sha256:811527e9b7280b136734ed6cb6845e5fbccaeaa132ddf45f0246cbe544016957", + "sha256:987b0e157f70c72a84f3c2f9ef2d7ab0f26c08f2bf326c12c087ff9eebcb3ff5", + "sha256:9fc6a2183d0a9b0974ec7cdcdad42bd78a3be674cc3e65f87dd694419b3b0ab7", + "sha256:a3d17ee4ae739fe16f7501a52255c2e287ac817cfd88565b9859f70520afffea", + "sha256:ba5b5488719c0f2ced0aa1986376f7baff1a1653a8eb5fdfcf3f84c7ce46ef8d", + "sha256:c573ea89dd95d41b6d8cf36799c34b6d5b1eac4aed0212dee0f0a11fb7b01e8f", + "sha256:c5f1b9e8592d2c448c44e6bc0d91224b16ea5f8293908b1561de1f6d2d0658b1", + "sha256:cbe581456357d8f0674d6a590b1aaf46c11d01dd0a23af147a51a798c3818034", + "sha256:cf219bec69e601fe27e3974b7307d2f06082ab385d42752738ad2eb630a47d65", + "sha256:cf5014eb214d814a83a7a47407272d5db10b719dbeaf4d3cfe5969309d0fcf4b", + "sha256:d08bad67fa18f7e8ff738c090628ee0cbf0505d74a991c848d6d04abfe67b697", + "sha256:d6f716d7b1182bf35862b5065112f933f43dd1aa4f8097c9bcfb246f71528a34", + "sha256:e08e479102627641c7cb4ece421c6ed4124820b1758765db32201136762282d9", + "sha256:e20ac21418af0298437d29599f7851915497ce9f2866bc8e86b084d8911ee061", + "sha256:e25f53c37e319241b9a412382140dffac98ca756ba8f360ac7ab5e30cad9670a", + "sha256:e8932bddf159064f04e946fbb64693753488de21586f20e840b3be51745c8c09", + "sha256:f20900f16377f2109783ae9348d34bc80530808439591c3d3df73d5c7ef1a00c" + ], + "version": "==0.4.2" }, "colorama": { "hashes": [ @@ -127,9 +203,9 @@ }, "distlib": { "hashes": [ - "sha256:cd502c66fc27c535bab62dc4f482e403e2369c2c05281a79cc2d4e2f42a87f20" + "sha256:57977cd7d9ea27986ec62f425630e4ddb42efe651ff80bc58ed8dbc3c7c21f19" ], - "version": "==0.2.7" + "version": "==0.2.8" }, "docutils": { "hashes": [ @@ -154,7 +230,6 @@ "sha256:a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", "sha256:fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83" ], - "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*'", "version": "==1.5.0" }, "first": { @@ -203,6 +278,12 @@ "markers": "python_version < '3' and python_version >= '2.6'", "version": "==3.2.0" }, + "future": { + "hashes": [ + "sha256:e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb" + ], + "version": "==0.16.0" + }, "idna": { "hashes": [ "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", @@ -212,10 +293,10 @@ }, "imagesize": { "hashes": [ - "sha256:3620cc0cadba3f7475f9940d22431fc4d407269f1be59ec9b8edcca26440cf18", - "sha256:5b326e4678b6925158ccc66a9fa3122b6106d7c876ee32d7de6ce59385b96315" + "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", + "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5" ], - "version": "==1.0.0" + "version": "==1.1.0" }, "incremental": { 
"hashes": [ @@ -226,12 +307,12 @@ }, "invoke": { "hashes": [ - "sha256:1c2cf54c9b9af973ad9704d8ba81b225117cab612568cacbfb3fc42958cc20a9", - "sha256:334495ea16e73948894e9535019f87a88a44b73e7977492b12c2d1b5085f8197", - "sha256:54bdd3fd0245abd1185e05359fd2e4f26be0657cfe7d7bb1bed735e054fa53ab" + "sha256:4f4de934b15c2276caa4fbc5a3b8a61c0eb0b234f2be1780d2b793321995c2d6", + "sha256:dc492f8f17a0746e92081aec3f86ae0b4750bf41607ea2ad87e5a7b5705121b7", + "sha256:eb6f9262d4d25b40330fb21d1e99bf0f85011ccc3526980f8a3eaedd4b43892e" ], "index": "pypi", - "version": "==1.1.1" + "version": "==1.2.0" }, "isort": { "hashes": [ @@ -250,11 +331,11 @@ }, "jedi": { "hashes": [ - "sha256:b409ed0f6913a701ed474a614a3bb46e6953639033e31f769ca7581da5bd1ec1", - "sha256:c254b135fb39ad76e78d4d8f92765ebc9bf92cbc76f49e97ade1d5f5121e1f6f" + "sha256:0191c447165f798e6a730285f2eee783fff81b0d3df261945ecb80983b5c3ca7", + "sha256:b7493f73a2febe0dc33d51c99b474547f7f6c0b2c8fb2b21f453eef204c12148" ], "index": "pypi", - "version": "==0.12.1" + "version": "==0.13.1" }, "jinja2": { "hashes": [ @@ -284,14 +365,6 @@ "index": "pypi", "version": "==2.0.0" }, - "modutil": { - "hashes": [ - "sha256:2c85c1666649e92e56de17c00e1e831313602d9b55e8661d39c01e39003b45f7", - "sha256:cc3dad264e36ed359fdd67c4588959d2996bd0402ad9c9d974ca906821537218" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, "more-itertools": { "hashes": [ "sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092", @@ -302,10 +375,10 @@ }, "packaging": { "hashes": [ - "sha256:e9215d2d2535d3ae866c3d6efc77d5b24a0192cce0ff20e42896cc0664f889c0", - "sha256:f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b" + "sha256:0886227f54515e592aaa2e5a553332c73962917f2831f1b0f9b9f4380a4b9807", + "sha256:f95a1e147590f204328170981833854229bb2912ac3d5f89e2a8ccd2834800c9" ], - "version": "==17.1" + "version": "==18.0" }, "parso": { "hashes": [ @@ -325,21 +398,21 @@ "passa": { "editable": true, "git": "https://github.com/sarugaku/passa.git", - "ref": "54e65e01744cafbcab44eb15422e1604b615caae" + "ref": "4f3b8102f122cf0b75e5d7c513a2e61b0b093dcd" }, "pbr": { "hashes": [ - "sha256:1b8be50d938c9bb75d0eaf7eda111eec1bf6dc88a62a6412e33bf077457e0f45", - "sha256:b486975c0cafb6beeb50ca0e17ba047647f229087bd74e37f4a7e2cac17d2caa" + "sha256:1be135151a0da949af8c5d0ee9013d9eafada71237eb80b3ba8896b4f12ec5dc", + "sha256:cf36765bf2218654ae824ec8e14257259ba44e43b117fd573c8d07a9895adbdd" ], - "version": "==4.2.0" + "version": "==4.3.0" }, "pip-shims": { "hashes": [ - "sha256:9c8a568b4a8ce4000a2982224f48a35736fca81214dfdb30dcae24287866a7e4", - "sha256:ebc2bb29ddd21fa00c0cf28a5d8c725100f2f7ee98703aba237efd02e205c1c1" + "sha256:164b93bc94b207613d9632f28f4d55eba9301f9454aaaba335de36c24d92d106", + "sha256:27e2439aa93af8c1b8e58cf63a40cbcd26959b26424904f2e6d57837af8f76c5" ], - "version": "==0.1.2" + "version": "==0.3.0" }, "pipenv": { "editable": true, @@ -354,11 +427,10 @@ }, "plette": { "hashes": [ - "sha256:3c2bbf439dad64d9a89459c20305b976d797ae1c2ad48a540e7022b377717851", - "sha256:cc9490a009494395dea286f4f488e0839ac06e1361d4951cc12fe621b6f0a68c" + "sha256:c0e3553c1e581d8423daccbd825789c6e7f29b7d9e00e5331b12e1642a1a26d3", + "sha256:dde5d525cf5f0cbad4d938c83b93db17887918daf63c13eafed257c4f61b07b4" ], - "markers": "python_version >= '2.6'", - "version": "==0.1.1" + "version": "==0.2.2" }, "pluggy": { "hashes": [ @@ -369,10 +441,10 @@ }, "py": { "hashes": [ - "sha256:3fd59af7435864e1a243790d322d763925431213b6b8529c6ca71081ace3bbf7", - 
"sha256:e31fb2767eb657cbde86c454f02e99cb846d3cd9d61b318525140214fdc0e98e" + "sha256:06a30435d058473046be836d3fc4f27167fd84c45b99704f2fb5509ef61f9af1", + "sha256:50402e9d1c9005d759426988a492e0edaadb7f4e68bcddfea586bc7432d009c6" ], - "version": "==1.5.4" + "version": "==1.6.0" }, "pycodestyle": { "hashes": [ @@ -381,6 +453,12 @@ ], "version": "==2.3.1" }, + "pycparser": { + "hashes": [ + "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" + ], + "version": "==2.19" + }, "pyflakes": { "hashes": [ "sha256:08bd6a50edf8cffa9fa09a463063c425ecaaf10d1eb0335a7e8b1401aef89e6f", @@ -397,25 +475,24 @@ }, "pyparsing": { "hashes": [ - "sha256:0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04", - "sha256:fee43f17a9c4087e7ed1605bd6df994c6173c1e977d7ade7b651292fab2bd010" + "sha256:bc6c7146b91af3f567cf6daeaec360bc07d45ffec4cf5353f4d7a208ce7ca30a", + "sha256:d29593d8ebe7b57d6967b62494f8c72b03ac0262b1eed63826c6f788b3606401" ], - "version": "==2.2.0" + "version": "==2.2.2" }, "pytest": { "hashes": [ - "sha256:3459a123ad5532852d36f6f4501dfe1acf4af1dd9541834a164666aa40395b02", - "sha256:96bfd45dbe863b447a3054145cd78a9d7f31475d2bce6111b133c0cc4f305118" + "sha256:7e258ee50338f4e46957f9e09a0f10fb1c2d05493fa901d113a8dafd0790de4e", + "sha256:9332147e9af2dcf46cd7ceb14d5acadb6564744ddff1fe8c17f0ce60ece7d9a2" ], "index": "pypi", - "version": "==3.7.2" + "version": "==3.8.2" }, "pytest-forked": { "hashes": [ "sha256:e4500cd0509ec4a26535f7d4112a8cc0f17d3a41c29ffd4eab479d2a55b30805", "sha256:f275cb48a73fc61a6710726348e1da6d68a978f0ec0c54ece5a5fae5977e5a08" ], - "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*'", "version": "==0.2" }, "pytest-pypy": { @@ -424,19 +501,19 @@ }, "pytest-tap": { "hashes": [ - "sha256:06ff3ca882b69814f20e36533157394372479c91c21233964dab52c071faf8d3", - "sha256:ea621f3ab77c12dc662ad6daeeb0152cfab522e2ef968d90500cf4c0a59dc965" + "sha256:3b05ec931424bbe44e944726b68f7ef185bb6d25ce9ce21ac52c9af7ffa9b506", + "sha256:ca063de56298034302f3cbce55c87a27d7bfa7af7de591cdb9ec6ce45fea5467" ], "index": "pypi", - "version": "==2.2" + "version": "==2.3" }, "pytest-xdist": { "hashes": [ - "sha256:3308c4f6221670432d01e0b393b333d77c1fd805532e1d64450e8140855eb51b", - "sha256:cce08b4b7f56d34d43b365e2b3667ebb8edcf91d01c2a8fccf45c56d37e71bc1" + "sha256:06aa39361694c9365baaa03bec71159b59ad06c9826c6279ebba368cb3571561", + "sha256:1ef0d05c905cfa0c5442c90e9e350e65c6ada120e33a00a066ca51c89f5f869a" ], "index": "pypi", - "version": "==1.22.5" + "version": "==1.23.2" }, "pytz": { "hashes": [ @@ -446,6 +523,13 @@ "index": "pypi", "version": "==2018.5" }, + "readme-renderer": { + "hashes": [ + "sha256:237ca8705ffea849870de41101dba41543561da05c0ae45b2f1c547efa9843d2", + "sha256:f75049a3a7afa57165551e030dd8f9882ebf688b9600535a3f7e23596651875d" + ], + "version": "==22.0" + }, "requests": { "hashes": [ "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1", @@ -462,17 +546,17 @@ }, "requirementslib": { "hashes": [ - "sha256:698a566272669a470a8a439250353b0b628468ea0879f4f32e22245c2b9d9e44", - "sha256:c7031b128e13eb8d8847f9ce409f3bfab49e8d111e7e8fca432ff8a737820653" + "sha256:39fb4aab3ebd7f46b266ddc98a3ac731127ee35fe6cf1b3e11be7c6551cc2c9b", + "sha256:810d8961f333d8fef92400f58b25f80003151fb424a545e244073fc3d95ae2dd" ], - "version": "==1.1.1" + "version": "==1.1.7" }, "resolvelib": { "hashes": [ - "sha256:d52f2c0762deeb2a4cc34a84371a7a5ac85e111bdc69ce9ae729d8d636606ad6", - 
"sha256:eb759d43bbf50de9bf36afb9f6c269fabf9ff49084dbfad4ba67252d134bf4b5" + "sha256:6c4c6690b0bdd78bcc002e1a5d1b6abbde58c694a6ea1838f165b20d2c943db7", + "sha256:8734e53271ef98f38a2c99324d5e7905bc00c97dc3fc5bb7d83c82a979e71c04" ], - "version": "==0.2.1" + "version": "==0.2.2" }, "rope": { "hashes": [ @@ -528,45 +612,44 @@ }, "tap.py": { "hashes": [ - "sha256:03accd27118473475b33b44703b223df2f148679b9b01b6ac59866df0b580073", - "sha256:06416d376f0d398ab163674f30ea3b4a320957e4baa51793b8e86bdfdfeb857d" + "sha256:8ad62ba6898fcef4913c67d468d0c4beae3109b74c03363538145e31b1840b29", + "sha256:f6532fd7483c5fdc2ed13575fa4494e7d037f797f8a2c6f8809a859be61271f5" ], - "version": "==2.4" + "version": "==2.5" }, "toml": { "hashes": [ - "sha256:8e86bd6ce8cc11b9620cb637466453d94f5d57ad86f17e98a98d1f73e3baab2d" + "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", + "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" ], - "version": "==0.9.4" + "version": "==0.10.0" }, "tomlkit": { "hashes": [ - "sha256:4f112445d6e52a038adf23b027ccb11905fdf88976990116e8f7b171b768cedb", - "sha256:8b84ac193aa6366769f89541cf213efe9784ac125f08164974400c43f18fcd9f" + "sha256:8ab16e93162fc44d3ad83d2aa29a7140b8f7d996ae1790a73b9a7aed6fb504ac", + "sha256:ca181cee7aee805d455628f7c94eb8ae814763769a93e69157f250fe4ebe1926" ], - "markers": "python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*' and python_version != '3.0.*' and python_version >= '2.7'", - "version": "==0.4.2" + "version": "==0.4.4" }, "towncrier": { "editable": true, "git": "https://github.com/hawkowl/towncrier.git", - "ref": "3d600a813b8bb4277b8bd77360e54531ce274b58" + "ref": "47754a607a9b03f06affaf167d65b990786aae25" }, "tqdm": { "hashes": [ - "sha256:5ef526702c0d265d5a960a3b27f3971fac13c26cf0fb819294bfa71fc6026c88", - "sha256:a3364bd83ce4777320b862e3c8a93d7da91e20a95f06ef79bed7dd71c654cafa" + "sha256:18f1818ce951aeb9ea162ae1098b43f583f7d057b34d706f66939353d1208889", + "sha256:df02c0650160986bac0218bb07952245fc6960d23654648b5d5526ad5a4128c9" ], - "markers": "python_version != '3.1.*' and python_version != '3.0.*' and python_version >= '2.6'", - "version": "==4.25.0" + "version": "==4.26.0" }, "twine": { "hashes": [ - "sha256:08eb132bbaec40c6d25b358f546ec1dc96ebd2638a86eea68769d9e67fe2b129", - "sha256:2fd9a4d9ff0bcacf41fdc40c8cb0cfaef1f1859457c9653fd1b92237cc4e9f25" + "sha256:7d89bc6acafb31d124e6e5b295ef26ac77030bf098960c2a4c4e058335827c5c", + "sha256:fad6f1251195f7ddd1460cb76d6ea106c93adb4e56c41e0da79658e56e547d2c" ], "index": "pypi", - "version": "==1.11.0" + "version": "==1.12.1" }, "typing": { "hashes": [ @@ -589,7 +672,6 @@ "sha256:2ce32cd126117ce2c539f0134eb89de91a8413a29baac49cbab3eb50e2026669", "sha256:ca07b4c0b54e14a91af9f34d0919790b016923d157afda5efdde55c96718f752" ], - "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.1.*'", "version": "==16.0.0" }, "virtualenv-clone": { @@ -601,10 +683,17 @@ }, "vistir": { "hashes": [ - "sha256:011e52dd2e09f948f638262dc39fef38998d134538705a810e88ad6d7bb94c1c", - "sha256:f447923d4c59e8d50add4a9d8275b25a1f038f1a1a00ded50ee3c3d00a3c7f5d" + "sha256:8a360ac20cbcc0863d6dbbe7a52e8b2c9ebf48abd6833c3813a82c70708244af", + "sha256:bc6e10284792485c10585536e6aede9e38996c841cc9d2a67238cd05742c2d0b" + ], + "version": "==0.1.6" + }, + "webencodings": { + "hashes": [ + "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", + 
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" ], - "version": "==0.1.4" + "version": "==0.5.1" }, "werkzeug": { "hashes": [ @@ -615,11 +704,17 @@ }, "wheel": { "hashes": [ - "sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c", - "sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f" + "sha256:9fa1f772f1a2df2bd00ddb4fa57e1cc349301e1facb98fbe62329803a9ff1196", + "sha256:d215f4520a1ba1851a3c00ba2b4122665cd3d6b0834d2ba2816198b1e3024a0e" + ], + "version": "==0.32.1" + }, + "yaspin": { + "hashes": [ + "sha256:36fdccc5e0637b5baa8892fe2c3d927782df7d504e9020f40eb2c1502518aa5a", + "sha256:8e52bf8079a48e2a53f3dfeec9e04addb900c101d1591c85df69cf677d3237e7" ], - "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.1.*'", - "version": "==0.31.1" + "version": "==0.14.0" } } } diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index eb20560d94..2b156073a8 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -20,11 +20,11 @@ SafeFileCache ) os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip" -from pip_shims.shims import pip_import, VcsSupport, WheelCache +from pip_shims.shims import do_import, VcsSupport, WheelCache from packaging.requirements import Requirement from packaging.specifiers import SpecifierSet, Specifier from packaging.markers import Op, Value, Variable, Marker -InstallationError = pip_import("InstallationError", "exceptions.InstallationError", "7.0", "9999") +InstallationError = do_import(("exceptions.InstallationError", "7.0", "9999")) from notpip._internal.resolve import Resolver as PipResolver @@ -264,7 +264,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= 'download_dir': download_dir, 'wheel_download_dir': self._wheel_download_dir, 'progress_bar': 'off', - 'build_isolation': True + 'build_isolation': False } resolver_kwargs = { 'finder': self.finder, @@ -277,7 +277,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= 'isolated': False, 'wheel_cache': wheel_cache, 'use_user_site': False, - 'ignore_compatibility': True + 'ignore_compatibility': False } resolver = None preparer = None diff --git a/pipenv/vendor/attr/__init__.py b/pipenv/vendor/attr/__init__.py index 13cb6298da..debfd57b0f 100644 --- a/pipenv/vendor/attr/__init__.py +++ b/pipenv/vendor/attr/__init__.py @@ -6,16 +6,24 @@ from ._config import get_run_validators, set_run_validators from ._funcs import asdict, assoc, astuple, evolve, has from ._make import ( - NOTHING, Attribute, Factory, attrib, attrs, fields, fields_dict, - make_class, validate + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, ) -__version__ = "18.1.0" +__version__ = "18.2.0" __title__ = "attrs" __description__ = "Classes Without Boilerplate" -__uri__ = "http://www.attrs.org/" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ __doc__ = __description__ + " <" + __uri__ + ">" __author__ = "Hynek Schlawack" diff --git a/pipenv/vendor/attr/__init__.pyi b/pipenv/vendor/attr/__init__.pyi new file mode 100644 index 0000000000..492fb85ede --- /dev/null +++ b/pipenv/vendor/attr/__init__.pyi @@ -0,0 +1,252 @@ +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Optional, + Sequence, + Mapping, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import 
X as X` is required to make these public +from . import exceptions as exceptions +from . import filters as filters +from . import converters as converters +from . import validators as validators + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_ValidatorType = Callable[[Any, Attribute, _T], Any] +_ConverterType = Callable[[Any], _T] +_FilterType = Callable[[Attribute, Any], bool] +# FIXME: in reality, if multiple validators are passed they must be in a list or tuple, +# but those are invariant and so would prevent subtypes of _ValidatorType from working +# when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +@overload +def Factory(factory: Callable[[], _T]) -> _T: ... +@overload +def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., +) -> _T: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: bool + cmp: bool + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType[_T]] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + def __lt__(self, x: Attribute) -> bool: ... + def __le__(self, x: Attribute) -> bool: ... + def __gt__(self, x: Attribute) -> bool: ... + def __ge__(self, x: Attribute) -> bool: ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting TypeVars +# e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + convert: None = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + convert: Optional[_ConverterType[_T]] = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType[_T]] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., +) -> _T: ... + +# This form catches an explicit default argument. 
+@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + convert: Optional[_ConverterType[_T]] = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType[_T]] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + convert: Optional[_ConverterType[_T]] = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType[_T]] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., +) -> Any: ... +@overload +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., +) -> _C: ... +@overload +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., +) -> Callable[[_C], _C]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute, ...]): + def __getattr__(self, name: str) -> Attribute: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute]: ... +def validate(inst: Any) -> None: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. waiting on one of these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType] = ..., + tuple_factory: Type[Sequence] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... 
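
The stub file above is how attrs 18.2.0 teaches type checkers about its otherwise-dynamic API: each `attrib` overload covers one calling pattern, and `Factory` deliberately "lies" about its return type so that defaults type-check. A minimal sketch (not part of the patch) of what this buys in practice, assuming the vendored attrs 18.2.0; the `Point` class is illustrative only:

    import attr

    @attr.s
    class Point(object):
        # matches the "explicit default" overload, so a checker infers `x: int`
        x = attr.ib(default=0)
        # Factory's stub pretends to return the factory's result type,
        # so `tags` checks as `list` rather than as a Factory marker object
        tags = attr.ib(default=attr.Factory(list))

    p = Point(x=1)
    print(attr.fields(Point).x.default)  # -> 0
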
+ +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/pipenv/vendor/attr/_compat.py b/pipenv/vendor/attr/_compat.py index 42a91ee5dc..5bb065932c 100644 --- a/pipenv/vendor/attr/_compat.py +++ b/pipenv/vendor/attr/_compat.py @@ -14,6 +14,7 @@ ordered_dict = dict else: from collections import OrderedDict + ordered_dict = OrderedDict @@ -39,38 +40,45 @@ class ReadOnlyDict(IterableUserDict): def __setitem__(self, key, val): # We gently pretend we're a Python 3 mappingproxy. - raise TypeError("'mappingproxy' object does not support item " - "assignment") + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) def update(self, _): # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError("'mappingproxy' object has no attribute " - "'update'") + raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) def __delitem__(self, _): # We gently pretend we're a Python 3 mappingproxy. - raise TypeError("'mappingproxy' object does not support item " - "deletion") + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) def clear(self): # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError("'mappingproxy' object has no attribute " - "'clear'") + raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) def pop(self, key, default=None): # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError("'mappingproxy' object has no attribute " - "'pop'") + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) def popitem(self): # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError("'mappingproxy' object has no attribute " - "'popitem'") + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) def setdefault(self, key, default=None): # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError("'mappingproxy' object has no attribute " - "'setdefault'") + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) def __repr__(self): # Override to be identical to the Python 3 version. @@ -81,7 +89,9 @@ def metadata_proxy(d): res.data.update(d) # We blocked update, so we have to do it like this. return res + else: + def isclass(klass): return isinstance(klass, type) @@ -99,10 +109,12 @@ def import_ctypes(): Moved into a function for testability. """ import ctypes + return ctypes if not PY2: + def just_warn(*args, **kw): """ We only warn on Python 3 because we are not aware of any concrete @@ -114,7 +126,10 @@ def just_warn(*args, **kw): RuntimeWarning, stacklevel=2, ) + + else: + def just_warn(*args, **kw): # pragma: nocover """ We only warn on Python 3 because we are not aware of any concrete @@ -127,8 +142,10 @@ def make_set_closure_cell(): Moved into a function for testability. 
""" if PYPY: # pragma: no cover + def set_closure_cell(cell, value): cell.__setstate__((value,)) + else: try: ctypes = import_ctypes() diff --git a/pipenv/vendor/attr/_funcs.py b/pipenv/vendor/attr/_funcs.py index 798043af3f..b61d239412 100644 --- a/pipenv/vendor/attr/_funcs.py +++ b/pipenv/vendor/attr/_funcs.py @@ -7,8 +7,13 @@ from .exceptions import AttrsAttributeNotFoundError -def asdict(inst, recurse=True, filter=None, dict_factory=dict, - retain_collection_types=False): +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, +): """ Return the ``attrs`` attribute values of *inst* as a dict. @@ -44,22 +49,32 @@ def asdict(inst, recurse=True, filter=None, dict_factory=dict, continue if recurse is True: if has(v.__class__): - rv[a.name] = asdict(v, recurse=True, filter=filter, - dict_factory=dict_factory) + rv[a.name] = asdict( + v, True, filter, dict_factory, retain_collection_types + ) elif isinstance(v, (tuple, list, set)): cf = v.__class__ if retain_collection_types is True else list - rv[a.name] = cf([ - asdict(i, recurse=True, filter=filter, - dict_factory=dict_factory) - if has(i.__class__) else i - for i in v - ]) + rv[a.name] = cf( + [ + _asdict_anything( + i, filter, dict_factory, retain_collection_types + ) + for i in v + ] + ) elif isinstance(v, dict): df = dict_factory - rv[a.name] = df(( - asdict(kk, dict_factory=df) if has(kk.__class__) else kk, - asdict(vv, dict_factory=df) if has(vv.__class__) else vv) - for kk, vv in iteritems(v)) + rv[a.name] = df( + ( + _asdict_anything( + kk, filter, df, retain_collection_types + ), + _asdict_anything( + vv, filter, df, retain_collection_types + ), + ) + for kk, vv in iteritems(v) + ) else: rv[a.name] = v else: @@ -67,8 +82,44 @@ def asdict(inst, recurse=True, filter=None, dict_factory=dict, return rv -def astuple(inst, recurse=True, filter=None, tuple_factory=tuple, - retain_collection_types=False): +def _asdict_anything(val, filter, dict_factory, retain_collection_types): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict(val, True, filter, dict_factory, retain_collection_types) + elif isinstance(val, (tuple, list, set)): + cf = val.__class__ if retain_collection_types is True else list + rv = cf( + [ + _asdict_anything( + i, filter, dict_factory, retain_collection_types + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything(kk, filter, df, retain_collection_types), + _asdict_anything(vv, filter, df, retain_collection_types), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): """ Return the ``attrs`` attribute values of *inst* as a tuple. 
@@ -104,34 +155,56 @@ def astuple(inst, recurse=True, filter=None, tuple_factory=tuple, continue if recurse is True: if has(v.__class__): - rv.append(astuple(v, recurse=True, filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain)) + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) elif isinstance(v, (tuple, list, set)): cf = v.__class__ if retain is True else list - rv.append(cf([ - astuple(j, recurse=True, filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain) - if has(j.__class__) else j - for j in v - ])) + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) elif isinstance(v, dict): df = v.__class__ if retain is True else dict - rv.append(df( + rv.append( + df( ( astuple( kk, tuple_factory=tuple_factory, - retain_collection_types=retain - ) if has(kk.__class__) else kk, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, astuple( vv, tuple_factory=tuple_factory, - retain_collection_types=retain - ) if has(vv.__class__) else vv + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, ) - for kk, vv in iteritems(v))) + for kk, vv in iteritems(v) + ) + ) else: rv.append(v) else: @@ -169,16 +242,21 @@ def assoc(inst, **changes): Use :func:`evolve` instead. """ import warnings - warnings.warn("assoc is deprecated and will be removed after 2018/01.", - DeprecationWarning, stacklevel=2) + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) new = copy.copy(inst) attrs = fields(inst.__class__) for k, v in iteritems(changes): a = getattr(attrs, k, NOTHING) if a is NOTHING: raise AttrsAttributeNotFoundError( - "{k} is not an attrs attribute on {cl}." - .format(k=k, cl=new.__class__) + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) ) _obj_setattr(new, k, v) return new diff --git a/pipenv/vendor/attr/_make.py b/pipenv/vendor/attr/_make.py index fc446115c9..f7fd05e739 100644 --- a/pipenv/vendor/attr/_make.py +++ b/pipenv/vendor/attr/_make.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, division, print_function +import copy import hashlib import linecache import sys @@ -10,11 +11,19 @@ from . 
import _config from ._compat import ( - PY2, isclass, iteritems, metadata_proxy, ordered_dict, set_closure_cell + PY2, + isclass, + iteritems, + metadata_proxy, + ordered_dict, + set_closure_cell, ) from .exceptions import ( - DefaultAlreadySetError, FrozenInstanceError, NotAnAttrsClassError, - UnannotatedAttributeError + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, ) @@ -22,8 +31,14 @@ _obj_setattr = object.__setattr__ _init_converter_pat = "__attr_converter_{}" _init_factory_pat = "__attr_factory_{}" -_tuple_property_pat = " {attr_name} = property(itemgetter({index}))" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) _classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar") +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" _empty_metadata_singleton = metadata_proxy({}) @@ -32,26 +47,19 @@ class _Nothing(object): """ Sentinel class to indicate the lack of a value when ``None`` is ambiguous. - All instances of `_Nothing` are equal. + ``_Nothing`` is a singleton. There is only ever one of it. """ - def __copy__(self): - return self - def __deepcopy__(self, _): - return self + _singleton = None - def __eq__(self, other): - return other.__class__ == _Nothing - - def __ne__(self, other): - return not self == other + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton def __repr__(self): return "NOTHING" - def __hash__(self): - return 0xc0ffee - NOTHING = _Nothing() """ @@ -59,10 +67,20 @@ def __hash__(self): """ -def attrib(default=NOTHING, validator=None, - repr=True, cmp=True, hash=None, init=True, - convert=None, metadata=None, type=None, converter=None, - factory=None): +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=True, + hash=None, + init=True, + convert=None, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, +): """ Create a new attribute on a class. @@ -135,6 +153,13 @@ def attrib(default=NOTHING, validator=None, Regardless of the approach used, the type will be stored on ``Attribute.type``. + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + :doc:`static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + .. versionadded:: 15.2.0 *convert* .. versionadded:: 16.3.0 *metadata* .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. @@ -146,6 +171,7 @@ def attrib(default=NOTHING, validator=None, *convert* to achieve consistency with other noun-based arguments. .. versionadded:: 18.1.0 ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* """ if hash is not None and hash is not True and hash is not False: raise TypeError( @@ -161,7 +187,8 @@ def attrib(default=NOTHING, validator=None, warnings.warn( "The `convert` argument is deprecated in favor of `converter`. " "It will be removed after 2019/01.", - DeprecationWarning, stacklevel=2 + DeprecationWarning, + stacklevel=2, ) converter = convert @@ -172,9 +199,7 @@ def attrib(default=NOTHING, validator=None, "exclusive." 
) if not callable(factory): - raise ValueError( - "The `factory` argument must be a callable." - ) + raise ValueError("The `factory` argument must be a callable.") default = Factory(factory) if metadata is None: @@ -190,6 +215,7 @@ def attrib(default=NOTHING, validator=None, converter=converter, metadata=metadata, type=type, + kw_only=kw_only, ) @@ -210,24 +236,30 @@ class MyClassAttributes(tuple): ] if attr_names: for i, attr_name in enumerate(attr_names): - attr_class_template.append(_tuple_property_pat.format( - index=i, - attr_name=attr_name, - )) + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) else: attr_class_template.append(" pass") - globs = {"itemgetter": itemgetter} + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} eval(compile("\n".join(attr_class_template), "", "exec"), globs) + return globs[attr_class_name] # Tuple class for extracted attributes from a class definition. -# `super_attrs` is a subset of `attrs`. -_Attributes = _make_attr_tuple_class("_Attributes", [ - "attrs", # all attributes to build dunder methods for - "super_attrs", # attributes that have been inherited - "super_attrs_map", # map inherited attributes to their originating classes -]) +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) def _is_class_var(annot): @@ -250,8 +282,8 @@ def _get_annotations(cls): return {} # Verify that the annotations aren't merely inherited. - for super_cls in cls.__mro__[1:]: - if anns is getattr(super_cls, "__annotations__", None): + for base_cls in cls.__mro__[1:]: + if anns is getattr(base_cls, "__annotations__", None): return {} return anns @@ -264,7 +296,7 @@ def _counter_getter(e): return e[1].counter -def _transform_attrs(cls, these, auto_attribs): +def _transform_attrs(cls, these, auto_attribs, kw_only): """ Transform all `_CountingAttr`s on a class into `Attribute`s. @@ -276,19 +308,14 @@ def _transform_attrs(cls, these, auto_attribs): anns = _get_annotations(cls) if these is not None: - ca_list = [ - (name, ca) - for name, ca - in iteritems(these) - ] + ca_list = [(name, ca) for name, ca in iteritems(these)] if not isinstance(these, ordered_dict): ca_list.sort(key=_counter_getter) elif auto_attribs is True: ca_names = { name - for name, attr - in cd.items() + for name, attr in cd.items() if isinstance(attr, _CountingAttr) } ca_list = [] @@ -308,77 +335,91 @@ def _transform_attrs(cls, these, auto_attribs): unannotated = ca_names - annot_names if len(unannotated) > 0: raise UnannotatedAttributeError( - "The following `attr.ib`s lack a type annotation: " + - ", ".join(sorted( - unannotated, - key=lambda n: cd.get(n).counter - )) + "." + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." 
) else: - ca_list = sorted(( - (name, attr) - for name, attr - in cd.items() - if isinstance(attr, _CountingAttr) - ), key=lambda e: e[1].counter) + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) own_attrs = [ Attribute.from_counting_attr( - name=attr_name, - ca=ca, - type=anns.get(attr_name), + name=attr_name, ca=ca, type=anns.get(attr_name) ) - for attr_name, ca - in ca_list + for attr_name, ca in ca_list ] - super_attrs = [] - super_attr_map = {} # A dictionary of superattrs to their classes. + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. taken_attr_names = {a.name: a for a in own_attrs} # Traverse the MRO and collect attributes. - for super_cls in cls.__mro__[1:-1]: - sub_attrs = getattr(super_cls, "__attrs_attrs__", None) + for base_cls in cls.__mro__[1:-1]: + sub_attrs = getattr(base_cls, "__attrs_attrs__", None) if sub_attrs is not None: for a in sub_attrs: prev_a = taken_attr_names.get(a.name) # Only add an attribute if it hasn't been defined before. This # allows for overwriting attribute definitions by subclassing. if prev_a is None: - super_attrs.append(a) + base_attrs.append(a) taken_attr_names[a.name] = a - super_attr_map[a.name] = super_cls + base_attr_map[a.name] = base_cls - attr_names = [a.name for a in super_attrs + own_attrs] + attr_names = [a.name for a in base_attrs + own_attrs] AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - attrs = AttrsClass( - super_attrs + [ - Attribute.from_counting_attr( - name=attr_name, - ca=ca, - type=anns.get(attr_name) - ) - for attr_name, ca - in ca_list - ] - ) + if kw_only: + own_attrs = [a._assoc(kw_only=True) for a in own_attrs] + base_attrs = [a._assoc(kw_only=True) for a in base_attrs] + + attrs = AttrsClass(base_attrs + own_attrs) had_default = False + was_kw_only = False for a in attrs: - if had_default is True and a.default is NOTHING and a.init is True: + if ( + was_kw_only is False + and had_default is True + and a.default is NOTHING + and a.init is True + and a.kw_only is False + ): raise ValueError( "No mandatory attributes allowed after an attribute with a " - "default value or factory. Attribute in question: {a!r}" - .format(a=a) + "default value or factory. Attribute in question: %r" % (a,) ) - elif had_default is False and \ - a.default is not NOTHING and \ - a.init is not False: + elif ( + had_default is False + and a.default is not NOTHING + and a.init is not False + and + # Keyword-only attributes without defaults can be specified + # after keyword-only attributes with defaults. + a.kw_only is False + ): had_default = True + if was_kw_only is True and a.kw_only is False: + raise ValueError( + "Non keyword-only attributes are not allowed after a " + "keyword-only attribute. Attribute in question: {a!r}".format( + a=a + ) + ) + if was_kw_only is False and a.init is True and a.kw_only is True: + was_kw_only = True - return _Attributes((attrs, super_attrs, super_attr_map)) + return _Attributes((attrs, base_attrs, base_attr_map)) def _frozen_setattrs(self, name, value): @@ -399,24 +440,47 @@ class _ClassBuilder(object): """ Iteratively build *one* class. 
""" + __slots__ = ( - "_cls", "_cls_dict", "_attrs", "_super_names", "_attr_names", "_slots", - "_frozen", "_has_post_init", "_delete_attribs", "_super_attr_map", + "_cls", + "_cls_dict", + "_attrs", + "_base_names", + "_attr_names", + "_slots", + "_frozen", + "_weakref_slot", + "_cache_hash", + "_has_post_init", + "_delete_attribs", + "_base_attr_map", ) - def __init__(self, cls, these, slots, frozen, auto_attribs): - attrs, super_attrs, super_map = _transform_attrs( - cls, these, auto_attribs + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + auto_attribs, + kw_only, + cache_hash, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, these, auto_attribs, kw_only ) self._cls = cls self._cls_dict = dict(cls.__dict__) if slots else {} self._attrs = attrs - self._super_names = set(a.name for a in super_attrs) - self._super_attr_map = super_map + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map self._attr_names = tuple(a.name for a in attrs) self._slots = slots - self._frozen = frozen or _has_frozen_superclass(cls) + self._frozen = frozen or _has_frozen_base_class(cls) + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) self._delete_attribs = not bool(these) @@ -433,7 +497,7 @@ def build_class(self): """ Finalize class based on the accumulated configuration. - Builder cannot be used anymore after calling this method. + Builder cannot be used after calling this method. """ if self._slots is True: return self._create_slots_class() @@ -445,14 +509,22 @@ def _patch_original_class(self): Apply accumulated methods and return the class. """ cls = self._cls - super_names = self._super_names + base_names = self._base_names # Clean class of attribute definitions (`attr.ib()`s). if self._delete_attribs: for name in self._attr_names: - if name not in super_names and \ - getattr(cls, name, None) is not None: - delattr(cls, name) + if ( + name not in base_names + and getattr(cls, name, None) is not None + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass # Attach our dunder methods. for name, value in self._cls_dict.items(): @@ -464,20 +536,36 @@ def _create_slots_class(self): """ Build and return a new class with a `__slots__` attribute. """ - super_names = self._super_names + base_names = self._base_names cd = { k: v for k, v in iteritems(self._cls_dict) - if k not in tuple(self._attr_names) + ("__dict__",) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") } + weakref_inherited = False + + # Traverse the MRO to check for an existing __weakref__. + for base_cls in self._cls.__mro__[1:-1]: + if "__weakref__" in getattr(base_cls, "__dict__", ()): + weakref_inherited = True + break + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + # We only add the names of attributes that aren't inherited. # Settings __slots__ to inherited attributes wastes memory. 
- cd["__slots__"] = tuple( - name - for name in self._attr_names - if name not in super_names - ) + slot_names = [name for name in names if name not in base_names] + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) qualname = getattr(self._cls, "__qualname__", None) if qualname is not None: @@ -492,10 +580,7 @@ def slots_getstate(self): """ Automatically created by attrs. """ - return tuple( - getattr(self, name) - for name in state_attr_names - ) + return tuple(getattr(self, name) for name in state_attr_names) def slots_setstate(self, state): """ @@ -510,11 +595,7 @@ def slots_setstate(self, state): cd["__setstate__"] = slots_setstate # Create new class based on old class and our methods. - cls = type(self._cls)( - self._cls.__name__, - self._cls.__bases__, - cd, - ) + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) # The following is a fix for # https://github.com/python-attrs/attrs/issues/102. On Python 3, @@ -563,7 +644,9 @@ def make_unhashable(self): def add_hash(self): self._cls_dict["__hash__"] = self._add_method_dunders( - _make_hash(self._attrs) + _make_hash( + self._attrs, frozen=self._frozen, cache_hash=self._cache_hash + ) ) return self @@ -575,7 +658,8 @@ def add_init(self): self._has_post_init, self._frozen, self._slots, - self._super_attr_map, + self._cache_hash, + self._base_attr_map, ) ) @@ -584,11 +668,11 @@ def add_init(self): def add_cmp(self): cd = self._cls_dict - cd["__eq__"], cd["__ne__"], cd["__lt__"], cd["__le__"], cd["__gt__"], \ - cd["__ge__"] = ( - self._add_method_dunders(meth) - for meth in _make_cmp(self._attrs) - ) + cd["__eq__"], cd["__ne__"], cd["__lt__"], cd["__le__"], cd[ + "__gt__" + ], cd["__ge__"] = ( + self._add_method_dunders(meth) for meth in _make_cmp(self._attrs) + ) return self @@ -603,7 +687,7 @@ def _add_method_dunders(self, method): try: method.__qualname__ = ".".join( - (self._cls.__qualname__, method.__name__,) + (self._cls.__qualname__, method.__name__) ) except AttributeError: pass @@ -611,9 +695,22 @@ def _add_method_dunders(self, method): return method -def attrs(maybe_cls=None, these=None, repr_ns=None, - repr=True, cmp=True, hash=None, init=True, - slots=False, frozen=False, str=False, auto_attribs=False): +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=True, + cmp=True, + hash=None, + init=True, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, +): r""" A class decorator that adds `dunder `_\ -methods according to the @@ -645,7 +742,7 @@ def attrs(maybe_cls=None, these=None, repr_ns=None, :param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` methods that compare the class as if it were a tuple of its ``attrs`` attributes. But the attributes are *only* - compared, if the type of both classes is *identical*! + compared, if the types of both classes are *identical*! :param hash: If ``None`` (default), the ``__hash__`` method is generated according how *cmp* and *frozen* are set. @@ -653,7 +750,7 @@ def attrs(maybe_cls=None, these=None, repr_ns=None, 2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to None, marking it unhashable (which it is). 3. If *cmp* is False, ``__hash__`` will be left untouched meaning the - ``__hash__`` method of the superclass will be used (if superclass is + ``__hash__`` method of the base class will be used (if base class is ``object``, this means it will fall back to id-based hashing.). 
Although not recommended, you can decide for yourself and force @@ -693,6 +790,8 @@ def attrs(maybe_cls=None, these=None, repr_ns=None, ``object.__setattr__(self, "attribute_name", value)``. .. _slots: https://docs.python.org/3/reference/datamodel.html#slots + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes (Python 3.6 and later only) from the class body. @@ -710,6 +809,16 @@ def attrs(maybe_cls=None, these=None, repr_ns=None, Attributes annotated as :data:`typing.ClassVar` are **ignored**. .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, then no attributes of this + class which participate in hash code computation may be mutated + after object creation. + .. versionadded:: 16.0.0 *slots* .. versionadded:: 16.1.0 *frozen* @@ -721,12 +830,30 @@ def attrs(maybe_cls=None, these=None, repr_ns=None, .. versionchanged:: 18.1.0 If *these* is passed, no attributes are deleted from the class body. .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + :class:`DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* """ + def wrap(cls): if getattr(cls, "__class__", None) is None: raise TypeError("attrs only works with new-style classes.") - builder = _ClassBuilder(cls, these, slots, frozen, auto_attribs) + builder = _ClassBuilder( + cls, + these, + slots, + frozen, + weakref_slot, + auto_attribs, + kw_only, + cache_hash, + ) if repr is True: builder.add_repr(repr_ns) @@ -741,14 +868,31 @@ def wrap(cls): "Invalid value for hash. Must be True, False, or None." ) elif hash is False or (hash is None and cmp is False): - pass + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) elif hash is True or (hash is None and cmp is True and frozen is True): builder.add_hash() else: + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) builder.make_unhashable() if init is True: builder.add_init() + else: + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) return builder.build_class() @@ -768,19 +912,22 @@ def wrap(cls): if PY2: - def _has_frozen_superclass(cls): + + def _has_frozen_base_class(cls): """ Check whether *cls* has a frozen ancestor by looking at its __setattr__. 
""" return ( - getattr( - cls.__setattr__, "__module__", None - ) == _frozen_setattrs.__module__ and - cls.__setattr__.__name__ == _frozen_setattrs.__name__ + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ ) + + else: - def _has_frozen_superclass(cls): + + def _has_frozen_base_class(cls): """ Check whether *cls* has a frozen ancestor by looking at its __setattr__. @@ -795,29 +942,54 @@ def _attrs_to_tuple(obj, attrs): return tuple(getattr(obj, a.name) for a in attrs) -def _make_hash(attrs): +def _make_hash(attrs, frozen, cache_hash): attrs = tuple( a for a in attrs if a.hash is True or (a.hash is None and a.cmp is True) ) + tab = " " + # We cache the generated hash methods for the same kinds of attributes. sha1 = hashlib.sha1() sha1.update(repr(attrs).encode("utf-8")) unique_filename = "" % (sha1.hexdigest(),) type_hash = hash(unique_filename) - lines = [ - "def __hash__(self):", - " return hash((", - " %d," % (type_hash,), - ] - for a in attrs: - lines.append(" self.%s," % (a.name)) - lines.append(" ))") + method_lines = ["def __hash__(self):"] - script = "\n".join(lines) + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + method_lines.extend( + [indent + prefix + "hash((", indent + " %d," % (type_hash,)] + ) + + for a in attrs: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " ))") + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) globs = {} locs = {} bytecode = compile(script, unique_filename, "exec") @@ -839,7 +1011,7 @@ def _add_hash(cls, attrs): """ Add a hash method to *cls*. """ - cls.__hash__ = _make_hash(attrs) + cls.__hash__ = _make_hash(attrs, frozen=False, cache_hash=False) return cls @@ -855,6 +1027,12 @@ def __ne__(self, other): return not result +WARNING_CMP_ISINSTANCE = ( + "Comparision of subclasses using __%s__ is deprecated and will be removed " + "in 2019." +) + + def _make_cmp(attrs): attrs = [a for a in attrs if a.cmp] @@ -871,9 +1049,7 @@ def _make_cmp(attrs): # irregularities like nan == nan is false but (nan,) == (nan,) is true. if attrs: lines.append(" return (") - others = [ - " ) == (", - ] + others = [" ) == ("] for a in attrs: lines.append(" self.%s," % (a.name,)) others.append(" other.%s," % (a.name,)) @@ -910,6 +1086,10 @@ def __lt__(self, other): Automatically created by attrs. """ if isinstance(other, self.__class__): + if other.__class__ is not self.__class__: + warnings.warn( + WARNING_CMP_ISINSTANCE % ("lt",), DeprecationWarning + ) return attrs_to_tuple(self) < attrs_to_tuple(other) else: return NotImplemented @@ -919,6 +1099,10 @@ def __le__(self, other): Automatically created by attrs. 
""" if isinstance(other, self.__class__): + if other.__class__ is not self.__class__: + warnings.warn( + WARNING_CMP_ISINSTANCE % ("le",), DeprecationWarning + ) return attrs_to_tuple(self) <= attrs_to_tuple(other) else: return NotImplemented @@ -928,6 +1112,10 @@ def __gt__(self, other): Automatically created by attrs. """ if isinstance(other, self.__class__): + if other.__class__ is not self.__class__: + warnings.warn( + WARNING_CMP_ISINSTANCE % ("gt",), DeprecationWarning + ) return attrs_to_tuple(self) > attrs_to_tuple(other) else: return NotImplemented @@ -937,6 +1125,10 @@ def __ge__(self, other): Automatically created by attrs. """ if isinstance(other, self.__class__): + if other.__class__ is not self.__class__: + warnings.warn( + WARNING_CMP_ISINSTANCE % ("ge",), DeprecationWarning + ) return attrs_to_tuple(self) >= attrs_to_tuple(other) else: return NotImplemented @@ -951,8 +1143,9 @@ def _add_cmp(cls, attrs=None): if attrs is None: attrs = cls.__attrs_attrs__ - cls.__eq__, cls.__ne__, cls.__lt__, cls.__le__, cls.__gt__, cls.__ge__ = \ - _make_cmp(attrs) + cls.__eq__, cls.__ne__, cls.__lt__, cls.__le__, cls.__gt__, cls.__ge__ = _make_cmp( # noqa + attrs + ) return cls @@ -964,11 +1157,7 @@ def _make_repr(attrs, ns): """ Make a repr method for *attr_names* adding *ns* to the full name. """ - attr_names = tuple( - a.name - for a in attrs - if a.repr - ) + attr_names = tuple(a.name for a in attrs if a.repr) def __repr__(self): """ @@ -1009,6 +1198,7 @@ def __repr__(self): return "".join(result) + ")" finally: working_set.remove(id(self)) + return __repr__ @@ -1023,34 +1213,21 @@ def _add_repr(cls, ns=None, attrs=None): return cls -def _make_init(attrs, post_init, frozen, slots, super_attr_map): - attrs = [ - a - for a in attrs - if a.init or a.default is not NOTHING - ] +def _make_init(attrs, post_init, frozen, slots, cache_hash, base_attr_map): + attrs = [a for a in attrs if a.init or a.default is not NOTHING] # We cache the generated init methods for the same kinds of attributes. sha1 = hashlib.sha1() sha1.update(repr(attrs).encode("utf-8")) - unique_filename = "".format( - sha1.hexdigest() - ) + unique_filename = "".format(sha1.hexdigest()) script, globs, annotations = _attrs_to_init_script( - attrs, - frozen, - slots, - post_init, - super_attr_map, + attrs, frozen, slots, post_init, cache_hash, base_attr_map ) locs = {} bytecode = compile(script, unique_filename, "exec") attr_dict = dict((a.name, a) for a in attrs) - globs.update({ - "NOTHING": NOTHING, - "attr_dict": attr_dict, - }) + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) if frozen is True: # Save the lookup overhead in __init__ if we need to circumvent # immutability. @@ -1080,7 +1257,8 @@ def _add_init(cls, frozen): getattr(cls, "__attrs_post_init__", False), frozen, _is_slot_cls(cls), - {}, + cache_hash=False, + base_attr_map={}, ) return cls @@ -1162,14 +1340,16 @@ def _is_slot_cls(cls): return "__slots__" in cls.__dict__ -def _is_slot_attr(a_name, super_attr_map): +def _is_slot_attr(a_name, base_attr_map): """ Check if the attribute name comes from a slot class. """ - return a_name in super_attr_map and _is_slot_cls(super_attr_map[a_name]) + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) -def _attrs_to_init_script(attrs, frozen, slots, post_init, super_attr_map): +def _attrs_to_init_script( + attrs, frozen, slots, post_init, cache_hash, base_attr_map +): """ Return a script of an initializer for *attrs* and a dict of globals. 
@@ -1180,14 +1360,14 @@ def _attrs_to_init_script(attrs, frozen, slots, post_init, super_attr_map): """ lines = [] any_slot_ancestors = any( - _is_slot_attr(a.name, super_attr_map) - for a in attrs + _is_slot_attr(a.name, base_attr_map) for a in attrs ) if frozen is True: if slots is True: lines.append( # Circumvent the __setattr__ descriptor to save one lookup per # assignment. + # Note _setattr will be used again below if cache_hash is True "_setattr = _cached_setattr.__get__(self, self.__class__)" ) @@ -1204,13 +1384,13 @@ def fmt_setter_with_converter(attr_name, value_var): "value_var": value_var, "conv": conv_name, } + else: # Dict frozen classes assign directly to __dict__. # But only if the attribute doesn't come from an ancestor slot # class. - lines.append( - "_inst_dict = self.__dict__" - ) + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") if any_slot_ancestors: lines.append( # Circumvent the __setattr__ descriptor to save one lookup @@ -1219,7 +1399,7 @@ def fmt_setter_with_converter(attr_name, value_var): ) def fmt_setter(attr_name, value_var): - if _is_slot_attr(attr_name, super_attr_map): + if _is_slot_attr(attr_name, base_attr_map): res = "_setattr('%(attr_name)s', %(value_var)s)" % { "attr_name": attr_name, "value_var": value_var, @@ -1233,7 +1413,7 @@ def fmt_setter(attr_name, value_var): def fmt_setter_with_converter(attr_name, value_var): conv_name = _init_converter_pat.format(attr_name) - if _is_slot_attr(attr_name, super_attr_map): + if _is_slot_attr(attr_name, base_attr_map): tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))" else: tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)" @@ -1242,6 +1422,7 @@ def fmt_setter_with_converter(attr_name, value_var): "value_var": value_var, "c": conv_name, } + else: # Not frozen. def fmt_setter(attr_name, value): @@ -1259,12 +1440,13 @@ def fmt_setter_with_converter(attr_name, value_var): } args = [] + kw_only_args = [] attrs_to_validate = [] # This is a dictionary of names to validator and converter callables. # Injecting this into __init__ globals lets us avoid lookups. 
names_for_globals = {} - annotations = {'return': None} + annotations = {"return": None} for a in attrs: if a.validator: @@ -1280,78 +1462,104 @@ def fmt_setter_with_converter(attr_name, value_var): if has_factory: init_factory_name = _init_factory_pat.format(a.name) if a.converter is not None: - lines.append(fmt_setter_with_converter( - attr_name, - init_factory_name + "({0})".format(maybe_self))) + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) conv_name = _init_converter_pat.format(a.name) names_for_globals[conv_name] = a.converter else: - lines.append(fmt_setter( - attr_name, - init_factory_name + "({0})".format(maybe_self) - )) + lines.append( + fmt_setter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) names_for_globals[init_factory_name] = a.default.factory else: if a.converter is not None: - lines.append(fmt_setter_with_converter( - attr_name, - "attr_dict['{attr_name}'].default" - .format(attr_name=attr_name) - )) + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['{attr_name}'].default".format( + attr_name=attr_name + ), + ) + ) conv_name = _init_converter_pat.format(a.name) names_for_globals[conv_name] = a.converter else: - lines.append(fmt_setter( - attr_name, - "attr_dict['{attr_name}'].default" - .format(attr_name=attr_name) - )) + lines.append( + fmt_setter( + attr_name, + "attr_dict['{attr_name}'].default".format( + attr_name=attr_name + ), + ) + ) elif a.default is not NOTHING and not has_factory: - args.append( - "{arg_name}=attr_dict['{attr_name}'].default".format( - arg_name=arg_name, - attr_name=attr_name, - ) + arg = "{arg_name}=attr_dict['{attr_name}'].default".format( + arg_name=arg_name, attr_name=attr_name ) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) if a.converter is not None: lines.append(fmt_setter_with_converter(attr_name, arg_name)) - names_for_globals[_init_converter_pat.format(a.name)] = ( - a.converter - ) + names_for_globals[ + _init_converter_pat.format(a.name) + ] = a.converter else: lines.append(fmt_setter(attr_name, arg_name)) elif has_factory: - args.append("{arg_name}=NOTHING".format(arg_name=arg_name)) - lines.append("if {arg_name} is not NOTHING:" - .format(arg_name=arg_name)) + arg = "{arg_name}=NOTHING".format(arg_name=arg_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append( + "if {arg_name} is not NOTHING:".format(arg_name=arg_name) + ) init_factory_name = _init_factory_pat.format(a.name) if a.converter is not None: - lines.append(" " + fmt_setter_with_converter( - attr_name, arg_name - )) + lines.append( + " " + fmt_setter_with_converter(attr_name, arg_name) + ) lines.append("else:") - lines.append(" " + fmt_setter_with_converter( - attr_name, - init_factory_name + "({0})".format(maybe_self) - )) - names_for_globals[_init_converter_pat.format(a.name)] = ( - a.converter + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) ) + names_for_globals[ + _init_converter_pat.format(a.name) + ] = a.converter else: lines.append(" " + fmt_setter(attr_name, arg_name)) lines.append("else:") - lines.append(" " + fmt_setter( - attr_name, - init_factory_name + "({0})".format(maybe_self) - )) + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "({0})".format(maybe_self), + ) + ) names_for_globals[init_factory_name] = a.default.factory else: - args.append(arg_name) + if a.kw_only: + 
kw_only_args.append(arg_name) + else: + args.append(arg_name) if a.converter is not None: lines.append(fmt_setter_with_converter(attr_name, arg_name)) - names_for_globals[_init_converter_pat.format(a.name)] = ( - a.converter - ) + names_for_globals[ + _init_converter_pat.format(a.name) + ] = a.converter else: lines.append(fmt_setter(attr_name, arg_name)) @@ -1364,20 +1572,52 @@ def fmt_setter_with_converter(attr_name, value_var): for a in attrs_to_validate: val_name = "__attr_validator_{}".format(a.name) attr_name = "__attr_{}".format(a.name) - lines.append(" {}(self, {}, self.{})".format( - val_name, attr_name, a.name)) + lines.append( + " {}(self, {}, self.{})".format(val_name, attr_name, a.name) + ) names_for_globals[val_name] = a.validator names_for_globals[attr_name] = a if post_init: lines.append("self.__attrs_post_init__()") - return """\ + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + args = ", ".join(args) + if kw_only_args: + if PY2: + raise PythonTooOldError( + "Keyword-only arguments only work on Python 3 and later." + ) + + args += "{leading_comma}*, {kw_only_args}".format( + leading_comma=", " if args else "", + kw_only_args=", ".join(kw_only_args), + ) + return ( + """\ def __init__(self, {args}): {lines} """.format( - args=", ".join(args), - lines="\n ".join(lines) if lines else "pass", - ), names_for_globals, annotations + args=args, lines="\n ".join(lines) if lines else "pass" + ), + names_for_globals, + annotations, + ) class Attribute(object): @@ -1390,13 +1630,36 @@ class Attribute(object): For the version history of the fields, see :func:`attr.ib`. """ + __slots__ = ( - "name", "default", "validator", "repr", "cmp", "hash", "init", - "metadata", "type", "converter", + "name", + "default", + "validator", + "repr", + "cmp", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", ) - def __init__(self, name, default, validator, repr, cmp, hash, init, - convert=None, metadata=None, type=None, converter=None): + def __init__( + self, + name, + default, + validator, + repr, + cmp, + hash, + init, + convert=None, + metadata=None, + type=None, + converter=None, + kw_only=False, + ): # Cache this descriptor here to speed things up later. bound_setattr = _obj_setattr.__get__(self, Attribute) @@ -1411,7 +1674,8 @@ def __init__(self, name, default, validator, repr, cmp, hash, init, warnings.warn( "The `convert` argument is deprecated in favor of `converter`." 
" It will be removed after 2019/01.", - DeprecationWarning, stacklevel=2 + DeprecationWarning, + stacklevel=2, ) converter = convert @@ -1423,11 +1687,16 @@ def __init__(self, name, default, validator, repr, cmp, hash, init, bound_setattr("hash", hash) bound_setattr("init", init) bound_setattr("converter", converter) - bound_setattr("metadata", ( - metadata_proxy(metadata) if metadata - else _empty_metadata_singleton - )) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) bound_setattr("type", type) + bound_setattr("kw_only", kw_only) def __setattr__(self, name, value): raise FrozenInstanceError() @@ -1437,7 +1706,8 @@ def convert(self): warnings.warn( "The `convert` attribute is deprecated in favor of `converter`. " "It will be removed after 2019/01.", - DeprecationWarning, stacklevel=2, + DeprecationWarning, + stacklevel=2, ) return self.converter @@ -1452,49 +1722,82 @@ def from_counting_attr(cls, name, ca, type=None): ) inst_dict = { k: getattr(ca, k) - for k - in Attribute.__slots__ - if k not in ( - "name", "validator", "default", "type", "convert", + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "convert", ) # exclude methods and deprecated alias } return cls( - name=name, validator=ca._validator, default=ca._default, type=type, + name=name, + validator=ca._validator, + default=ca._default, + type=type, **inst_dict ) + # Don't use attr.assoc since fields(Attribute) doesn't work + def _assoc(self, **changes): + """ + Copy *self* and apply *changes*. + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + # Don't use _add_pickle since fields(Attribute) doesn't work def __getstate__(self): """ Play nice with pickle. """ - return tuple(getattr(self, name) if name != "metadata" - else dict(self.metadata) - for name in self.__slots__) + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) def __setstate__(self, state): """ Play nice with pickle. """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in zip(self.__slots__, state): + for name, value in name_values_pairs: if name != "metadata": bound_setattr(name, value) else: - bound_setattr(name, metadata_proxy(value) if value else - _empty_metadata_singleton) + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) _a = [ - Attribute(name=name, default=NOTHING, validator=None, - repr=True, cmp=True, hash=(name != "metadata"), init=True) + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=True, + hash=(name != "metadata"), + init=True, + ) for name in Attribute.__slots__ if name != "convert" # XXX: remove once `convert` is gone ] Attribute = _add_hash( _add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a), - attrs=[a for a in _a if a.hash] + attrs=[a for a in _a if a.hash], ) @@ -1506,21 +1809,59 @@ class _CountingAttr(object): *Internal* data structure of the attrs library. Running into is most likely the result of a bug like a forgotten `@attr.s` decorator. 
""" - __slots__ = ("counter", "_default", "repr", "cmp", "hash", "init", - "metadata", "_validator", "converter", "type") + + __slots__ = ( + "counter", + "_default", + "repr", + "cmp", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + ) __attrs_attrs__ = tuple( - Attribute(name=name, default=NOTHING, validator=None, - repr=True, cmp=True, hash=True, init=True) - for name - in ("counter", "_default", "repr", "cmp", "hash", "init",) + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=True, + hash=True, + init=True, + kw_only=False, + ) + for name in ("counter", "_default", "repr", "cmp", "hash", "init") ) + ( - Attribute(name="metadata", default=None, validator=None, - repr=True, cmp=True, hash=False, init=True), + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=True, + hash=False, + init=True, + kw_only=False, + ), ) cls_counter = 0 - def __init__(self, default, validator, repr, cmp, hash, init, converter, - metadata, type): + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + ): _CountingAttr.cls_counter += 1 self.counter = _CountingAttr.cls_counter self._default = default @@ -1536,6 +1877,7 @@ def __init__(self, default, validator, repr, cmp, hash, init, converter, self.converter = converter self.metadata = metadata self.type = type + self.kw_only = kw_only def validator(self, meth): """ @@ -1587,6 +1929,7 @@ class Factory(object): .. versionadded:: 17.1.0 *takes_self* """ + factory = attrib() takes_self = attrib() @@ -1636,7 +1979,7 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): type_ = type( name, bases, - {} if post_init is None else {"__attrs_post_init__": post_init} + {} if post_init is None else {"__attrs_post_init__": post_init}, ) # For pickling to work, the __module__ variable needs to be set to the # frame where the class is created. Bypass this step in environments where @@ -1644,7 +1987,7 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): # defined for arguments greater than 0 (IronPython). try: type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__", + "__name__", "__main__" ) except (AttributeError, ValueError): pass @@ -1661,6 +2004,7 @@ class _AndValidator(object): """ Compose many validators to a single one. """ + _validators = attrib() def __call__(self, inst, attr, value): @@ -1682,7 +2026,8 @@ def and_(*validators): vals = [] for validator in validators: vals.extend( - validator._validators if isinstance(validator, _AndValidator) + validator._validators + if isinstance(validator, _AndValidator) else [validator] ) diff --git a/pipenv/vendor/attr/converters.py b/pipenv/vendor/attr/converters.py index 3b3bac92be..37c4a07a06 100644 --- a/pipenv/vendor/attr/converters.py +++ b/pipenv/vendor/attr/converters.py @@ -4,6 +4,8 @@ from __future__ import absolute_import, division, print_function +from ._make import NOTHING, Factory + def optional(converter): """ @@ -13,7 +15,7 @@ def optional(converter): :param callable converter: the converter that is used for non-``None`` values. - .. versionadded:: 17.1.0 + .. versionadded:: 17.1.0 """ def optional_converter(val): @@ -22,3 +24,55 @@ def optional_converter(val): return converter(val) return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. 
+
+    :param default: Value to be used if ``None`` is passed. Passing an instance
+      of :class:`attr.Factory` is supported, however the ``takes_self`` option
+      is *not*.
+    :param callable factory: A callable that takes no parameters whose result
+      is used if ``None`` is passed.
+
+    :raises TypeError: If **neither** *default* nor *factory* is passed.
+    :raises TypeError: If **both** *default* and *factory* are passed.
+    :raises ValueError: If an instance of :class:`attr.Factory` is passed with
+      ``takes_self=True``.
+
+    .. versionadded:: 18.2.0
+    """
+    if default is NOTHING and factory is None:
+        raise TypeError("Must pass either `default` or `factory`.")
+
+    if default is not NOTHING and factory is not None:
+        raise TypeError(
+            "Must pass either `default` or `factory` but not both."
+        )
+
+    if factory is not None:
+        default = Factory(factory)
+
+    if isinstance(default, Factory):
+        if default.takes_self:
+            raise ValueError(
+                "`takes_self` is not supported by default_if_none."
+            )
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default.factory()
+
+    else:
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default
+
+    return default_if_none_converter
diff --git a/pipenv/vendor/attr/converters.pyi b/pipenv/vendor/attr/converters.pyi
new file mode 100644
index 0000000000..63b2a3866e
--- /dev/null
+++ b/pipenv/vendor/attr/converters.pyi
@@ -0,0 +1,12 @@
+from typing import TypeVar, Optional, Callable, overload
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def optional(
+    converter: _ConverterType[_T]
+) -> _ConverterType[Optional[_T]]: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType[_T]: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ...
diff --git a/pipenv/vendor/attr/exceptions.py b/pipenv/vendor/attr/exceptions.py
index f949f3c9c0..b12e41e97a 100644
--- a/pipenv/vendor/attr/exceptions.py
+++ b/pipenv/vendor/attr/exceptions.py
@@ -10,6 +10,7 @@ class FrozenInstanceError(AttributeError):

     .. versionadded:: 16.1.0
     """
+
     msg = "can't set attribute"
     args = [msg]

@@ -46,3 +47,11 @@ class UnannotatedAttributeError(RuntimeError):

     .. versionadded:: 17.3.0
     """
+
+
+class PythonTooOldError(RuntimeError):
+    """
+    An ``attrs`` feature requiring a more recent python version has been used.
+
+    .. versionadded:: 18.2.0
+    """
diff --git a/pipenv/vendor/attr/exceptions.pyi b/pipenv/vendor/attr/exceptions.pyi
new file mode 100644
index 0000000000..48fffcc1e2
--- /dev/null
+++ b/pipenv/vendor/attr/exceptions.pyi
@@ -0,0 +1,7 @@
+class FrozenInstanceError(AttributeError):
+    msg: str = ...
+
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
diff --git a/pipenv/vendor/attr/filters.pyi b/pipenv/vendor/attr/filters.pyi
new file mode 100644
index 0000000000..a618140c2d
--- /dev/null
+++ b/pipenv/vendor/attr/filters.pyi
@@ -0,0 +1,5 @@
+from typing import Union
+from . import Attribute, _FilterType
+
+def include(*what: Union[type, Attribute]) -> _FilterType: ...
+def exclude(*what: Union[type, Attribute]) -> _FilterType: ...
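For orientation, the attr hunks above track the upstream attrs 18.2.0 release. A minimal usage sketch of the features they vendor in (kw_only, cache_hash, and the default_if_none converter); the Point class is an invented example, not code from this patch:

    import attr
    from attr.converters import default_if_none

    @attr.s(frozen=True, hash=True, cache_hash=True)  # hash computed once, then cached
    class Point(object):
        x = attr.ib()
        y = attr.ib(kw_only=True)  # keyword-only in __init__; Python 3 only
        tags = attr.ib(
            default=None,
            converter=default_if_none(factory=list),  # None is replaced by a fresh list
        )

    p = Point(1, y=2)
    assert p.tags == []
    assert hash(p) == hash(p)  # the second call reuses the cached value

Note that cache_hash is only legal here because hashing is explicitly enabled, matching the TypeError guards added to _make.py above.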
diff --git a/pipenv/vendor/attr/py.typed b/pipenv/vendor/attr/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/attr/validators.py b/pipenv/vendor/attr/validators.py index f8892fcdfe..f12d0aa593 100644 --- a/pipenv/vendor/attr/validators.py +++ b/pipenv/vendor/attr/validators.py @@ -7,13 +7,7 @@ from ._make import _AndValidator, and_, attrib, attrs -__all__ = [ - "and_", - "in_", - "instance_of", - "optional", - "provides", -] +__all__ = ["and_", "in_", "instance_of", "optional", "provides"] @attrs(repr=False, slots=True, hash=True) @@ -27,16 +21,20 @@ def __call__(self, inst, attr, value): if not isinstance(value, self.type): raise TypeError( "'{name}' must be {type!r} (got {value!r} that is a " - "{actual!r})." - .format(name=attr.name, type=self.type, - actual=value.__class__, value=value), - attr, self.type, value, + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, ) def __repr__(self): - return ( - "" - .format(type=self.type) + return "".format( + type=self.type ) @@ -67,15 +65,17 @@ def __call__(self, inst, attr, value): if not self.interface.providedBy(value): raise TypeError( "'{name}' must provide {interface!r} which {value!r} " - "doesn't." - .format(name=attr.name, interface=self.interface, value=value), - attr, self.interface, value, + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, ) def __repr__(self): - return ( - "" - .format(interface=self.interface) + return "".format( + interface=self.interface ) @@ -106,9 +106,8 @@ def __call__(self, inst, attr, value): self.validator(inst, attr, value) def __repr__(self): - return ( - "" - .format(what=repr(self.validator)) + return "".format( + what=repr(self.validator) ) @@ -135,16 +134,21 @@ class _InValidator(object): options = attrib() def __call__(self, inst, attr, value): - if value not in self.options: + try: + in_options = value in self.options + except TypeError as e: # e.g. `1 in "abc"` + in_options = False + + if not in_options: raise ValueError( - "'{name}' must be in {options!r} (got {value!r})" - .format(name=attr.name, options=self.options, value=value) + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ) ) def __repr__(self): - return ( - "" - .format(options=self.options) + return "".format( + options=self.options ) diff --git a/pipenv/vendor/attr/validators.pyi b/pipenv/vendor/attr/validators.pyi new file mode 100644 index 0000000000..abbaedf10a --- /dev/null +++ b/pipenv/vendor/attr/validators.pyi @@ -0,0 +1,14 @@ +from typing import Container, List, Union, TypeVar, Type, Any, Optional, Tuple +from . import _ValidatorType + +_T = TypeVar("_T") + +def instance_of( + type: Union[Tuple[Type[_T], ...], Type[_T]] +) -> _ValidatorType[_T]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... diff --git a/pipenv/vendor/click/LICENSE b/pipenv/vendor/click/LICENSE deleted file mode 100644 index 1704daa2c0..0000000000 --- a/pipenv/vendor/click/LICENSE +++ /dev/null @@ -1,38 +0,0 @@ -Copyright (c) 2014 by Armin Ronacher. - -Click uses parts of optparse written by Gregory P. 
Ward and maintained by the -Python software foundation. This is limited to code in the parser.py -module: - -Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved. -Copyright (c) 2002-2006 Python Software Foundation. All rights reserved. - -Some rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * The names of the contributors may not be used to endorse or - promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/click/LICENSE.rst b/pipenv/vendor/click/LICENSE.rst new file mode 100644 index 0000000000..87ce152aaf --- /dev/null +++ b/pipenv/vendor/click/LICENSE.rst @@ -0,0 +1,39 @@ +Copyright © 2014 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms of the software as +well as documentation, with or without modification, are permitted +provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +- Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + +---- + +Click uses parts of optparse written by Gregory P. 
Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright © 2001-2006 Gregory P. Ward. All rights reserved. +Copyright © 2002-2006 Python Software Foundation. All rights reserved. diff --git a/pipenv/vendor/click/__init__.py b/pipenv/vendor/click/__init__.py index 971e55d0a8..d3c33660a9 100644 --- a/pipenv/vendor/click/__init__.py +++ b/pipenv/vendor/click/__init__.py @@ -1,17 +1,15 @@ # -*- coding: utf-8 -*- """ - click - ~~~~~ +click +~~~~~ - Click is a simple Python module that wraps the stdlib's optparse to make - writing command line scripts fun. Unlike other modules, it's based around - a simple API that does not come with too much magic and is composable. +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. - In case optparse ever gets removed from the stdlib, it will be shipped by - this module. - - :copyright: (c) 2014 by Armin Ronacher. - :license: BSD, see LICENSE for more details. +:copyright: © 2014 by the Pallets team. +:license: BSD, see LICENSE.rst for more details. """ # Core classes @@ -28,7 +26,7 @@ # Types from .types import ParamType, File, Path, Choice, IntRange, Tuple, \ - STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED + DateTime, STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED, FloatRange # Utilities from .utils import echo, get_binary_stream, get_text_stream, open_file, \ @@ -65,8 +63,9 @@ 'version_option', 'help_option', # Types - 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', 'STRING', - 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED', + 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', + 'DateTime', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED', + 'FloatRange', # Utilities 'echo', 'get_binary_stream', 'get_text_stream', 'open_file', @@ -95,4 +94,4 @@ disable_unicode_literals_warning = False -__version__ = '6.7' +__version__ = '7.0' diff --git a/pipenv/vendor/click/_bashcomplete.py b/pipenv/vendor/click/_bashcomplete.py index d9d26d28b0..a5f1084c9a 100644 --- a/pipenv/vendor/click/_bashcomplete.py +++ b/pipenv/vendor/click/_bashcomplete.py @@ -1,27 +1,81 @@ +import copy import os import re + from .utils import echo from .parser import split_arg_string -from .core import MultiCommand, Option +from .core import MultiCommand, Option, Argument +from .types import Choice + +try: + from collections import abc +except ImportError: + import collections as abc +WORDBREAK = '=' -COMPLETION_SCRIPT = ''' +# Note, only BASH version 4.4 and later have the nosort option. +COMPLETION_SCRIPT_BASH = ''' %(complete_func)s() { + local IFS=$'\n' COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ COMP_CWORD=$COMP_CWORD \\ %(autocomplete_var)s=complete $1 ) ) return 0 } -complete -F %(complete_func)s -o default %(script_names)s +%(complete_func)setup() { + local COMPLETION_OPTIONS="" + local BASH_VERSION_ARR=(${BASH_VERSION//./ }) + # Only BASH version 4.4 and later have the nosort option. 
+    if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then
+        COMPLETION_OPTIONS="-o nosort"
+    fi
+
+    complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s
+}
+
+%(complete_func)setup
+'''
+
+COMPLETION_SCRIPT_ZSH = '''
+%(complete_func)s() {
+    local -a completions
+    local -a completions_with_descriptions
+    local -a response
+    response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\
+                        COMP_CWORD=$((CURRENT-1)) \\
+                        %(autocomplete_var)s=\"complete_zsh\" \\
+                        %(script_names)s )}")
+
+    for key descr in ${(kv)response}; do
+      if [[ "$descr" == "_" ]]; then
+          completions+=("$key")
+      else
+          completions_with_descriptions+=("$key":"$descr")
+      fi
+    done
+
+    if [ -n "$completions_with_descriptions" ]; then
+        _describe -V unsorted completions_with_descriptions -U -Q
+    fi
+
+    if [ -n "$completions" ]; then
+        compadd -U -V unsorted -Q -a completions
+    fi
+    compstate[insert]="automenu"
+}
+
+compdef %(complete_func)s %(script_names)s
 '''

 _invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')


-def get_completion_script(prog_name, complete_var):
+def get_completion_script(prog_name, complete_var, shell):
     cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_'))
-    return (COMPLETION_SCRIPT % {
+    script = COMPLETION_SCRIPT_ZSH if shell == 'zsh' else COMPLETION_SCRIPT_BASH
+    return (script % {
         'complete_func': '_%s_completion' % cf_name,
         'script_names': prog_name,
         'autocomplete_var': complete_var,
@@ -29,37 +83,189 @@ def get_completion_script(prog_name, complete_var):

 def resolve_ctx(cli, prog_name, args):
+    """
+    Parse into a hierarchy of contexts. Contexts are connected through the parent variable.
+    :param cli: command definition
+    :param prog_name: the program that is running
+    :param args: full list of args
+    :return: the final context/command parsed
+    """
     ctx = cli.make_context(prog_name, args, resilient_parsing=True)
-    while ctx.protected_args + ctx.args and isinstance(ctx.command, MultiCommand):
-        a = ctx.protected_args + ctx.args
-        cmd = ctx.command.get_command(ctx, a[0])
-        if cmd is None:
-            return None
-        ctx = cmd.make_context(a[0], a[1:], parent=ctx, resilient_parsing=True)
+    args = ctx.protected_args + ctx.args
+    while args:
+        if isinstance(ctx.command, MultiCommand):
+            if not ctx.command.chain:
+                cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
+                if cmd is None:
+                    return ctx
+                ctx = cmd.make_context(cmd_name, args, parent=ctx,
+                                       resilient_parsing=True)
+                args = ctx.protected_args + ctx.args
+            else:
+                # Walk chained subcommand contexts saving the last one.
+                while args:
+                    cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
+                    if cmd is None:
+                        return ctx
+                    sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
+                                               allow_extra_args=True,
+                                               allow_interspersed_args=False,
+                                               resilient_parsing=True)
+                    args = sub_ctx.args
+                ctx = sub_ctx
+                args = sub_ctx.protected_args + sub_ctx.args
+        else:
+            break
     return ctx


+def start_of_option(param_str):
+    """
+    :param param_str: param_str to check
+    :return: whether or not this is the start of an option declaration (i.e. starts "-" or "--")
+    """
+    return param_str and param_str[:1] == '-'
+
+
+def is_incomplete_option(all_args, cmd_param):
+    """
+    :param all_args: the full original list of args supplied
+    :param cmd_param: the current command parameter
+    :return: whether or not the last option declaration (i.e. starts "-" or "--") is incomplete and
+    corresponds to this cmd_param. In other words whether this cmd_param option can still accept
+    values
+    """
+    if not isinstance(cmd_param, Option):
+        return False
+    if cmd_param.is_flag:
+        return False
+    last_option = None
+    for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])):
+        if index + 1 > cmd_param.nargs:
+            break
+        if start_of_option(arg_str):
+            last_option = arg_str
+
+    return True if last_option and last_option in cmd_param.opts else False
+
+
+def is_incomplete_argument(current_params, cmd_param):
+    """
+    :param current_params: the current params and values for this argument as already entered
+    :param cmd_param: the current command parameter
+    :return: whether or not the last argument is incomplete and corresponds to this cmd_param. In
+    other words whether or not this cmd_param argument can still accept values
+    """
+    if not isinstance(cmd_param, Argument):
+        return False
+    current_param_values = current_params[cmd_param.name]
+    if current_param_values is None:
+        return True
+    if cmd_param.nargs == -1:
+        return True
+    if isinstance(current_param_values, abc.Iterable) \
+            and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
+        return True
+    return False
+
+
+def get_user_autocompletions(ctx, args, incomplete, cmd_param):
+    """
+    :param ctx: context associated with the parsed command
+    :param args: full list of args
+    :param incomplete: the incomplete text to autocomplete
+    :param cmd_param: command definition
+    :return: all the possible user-specified completions for the param
+    """
+    results = []
+    if isinstance(cmd_param.type, Choice):
+        # Choices don't support descriptions.
+        results = [(c, None)
+                   for c in cmd_param.type.choices if str(c).startswith(incomplete)]
+    elif cmd_param.autocompletion is not None:
+        dynamic_completions = cmd_param.autocompletion(ctx=ctx,
+                                                       args=args,
+                                                       incomplete=incomplete)
+        results = [c if isinstance(c, tuple) else (c, None)
+                   for c in dynamic_completions]
+    return results
+
+
+def get_visible_commands_starting_with(ctx, starts_with):
+    """
+    :param ctx: context associated with the parsed command
+    :param starts_with: string that visible commands must start with.
+    :return: all visible (not hidden) commands that start with starts_with.
+    """
+    for c in ctx.command.list_commands(ctx):
+        if c.startswith(starts_with):
+            command = ctx.command.get_command(ctx, c)
+            if not command.hidden:
+                yield command
+
+
+def add_subcommand_completions(ctx, incomplete, completions_out):
+    # Add subcommand completions.
+ if isinstance(ctx.command, MultiCommand): + completions_out.extend( + [(c.name, c.get_short_help_str()) for c in get_visible_commands_starting_with(ctx, incomplete)]) + + # Walk up the context list and add any other completion possibilities from chained commands + while ctx.parent is not None: + ctx = ctx.parent + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + remaining_commands = [c for c in get_visible_commands_starting_with(ctx, incomplete) + if c.name not in ctx.protected_args] + completions_out.extend([(c.name, c.get_short_help_str()) for c in remaining_commands]) + + def get_choices(cli, prog_name, args, incomplete): + """ + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :return: all the possible completions for the incomplete + """ + all_args = copy.deepcopy(args) + ctx = resolve_ctx(cli, prog_name, args) if ctx is None: - return + return [] + + # In newer versions of bash long opts with '='s are partitioned, but it's easier to parse + # without the '=' + if start_of_option(incomplete) and WORDBREAK in incomplete: + partition_incomplete = incomplete.partition(WORDBREAK) + all_args.append(partition_incomplete[0]) + incomplete = partition_incomplete[2] + elif incomplete == WORDBREAK: + incomplete = '' - choices = [] - if incomplete and not incomplete[:1].isalnum(): + completions = [] + if start_of_option(incomplete): + # completions for partial options for param in ctx.command.params: - if not isinstance(param, Option): - continue - choices.extend(param.opts) - choices.extend(param.secondary_opts) - elif isinstance(ctx.command, MultiCommand): - choices.extend(ctx.command.list_commands(ctx)) + if isinstance(param, Option) and not param.hidden: + param_opts = [param_opt for param_opt in param.opts + + param.secondary_opts if param_opt not in all_args or param.multiple] + completions.extend([(o, param.help) for o in param_opts if o.startswith(incomplete)]) + return completions + # completion for option values from user supplied values + for param in ctx.command.params: + if is_incomplete_option(all_args, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + # completion for argument values from user supplied values + for param in ctx.command.params: + if is_incomplete_argument(ctx.params, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) - for item in choices: - if item.startswith(incomplete): - yield item + add_subcommand_completions(ctx, incomplete, completions) + # Sort before returning so that proper ordering can be enforced in custom types. + return sorted(completions) -def do_complete(cli, prog_name): +def do_complete(cli, prog_name, include_descriptions): cwords = split_arg_string(os.environ['COMP_WORDS']) cword = int(os.environ['COMP_CWORD']) args = cwords[1:cword] @@ -69,15 +275,19 @@ def do_complete(cli, prog_name): incomplete = '' for item in get_choices(cli, prog_name, args, incomplete): - echo(item) + echo(item[0]) + if include_descriptions: + # ZSH has trouble dealing with empty array parameters when returned from commands, so use a well defined character '_' to indicate no description is present. 
+ echo(item[1] if item[1] else '_') return True def bashcomplete(cli, prog_name, complete_var, complete_instr): - if complete_instr == 'source': - echo(get_completion_script(prog_name, complete_var)) + if complete_instr.startswith('source'): + shell = 'zsh' if complete_instr == 'source_zsh' else 'bash' + echo(get_completion_script(prog_name, complete_var, shell)) return True - elif complete_instr == 'complete': - return do_complete(cli, prog_name) + elif complete_instr == 'complete' or complete_instr == 'complete_zsh': + return do_complete(cli, prog_name, complete_instr == 'complete_zsh') return False diff --git a/pipenv/vendor/click/_compat.py b/pipenv/vendor/click/_compat.py index 2b43412c4d..937e2301d4 100644 --- a/pipenv/vendor/click/_compat.py +++ b/pipenv/vendor/click/_compat.py @@ -7,24 +7,31 @@ PY2 = sys.version_info[0] == 2 -WIN = sys.platform.startswith('win') +CYGWIN = sys.platform.startswith('cygwin') +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) +WIN = sys.platform.startswith('win') and not APP_ENGINE DEFAULT_COLUMNS = 80 -_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])') +_ansi_re = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])') def get_filesystem_encoding(): return sys.getfilesystemencoding() or sys.getdefaultencoding() -def _make_text_stream(stream, encoding, errors): +def _make_text_stream(stream, encoding, errors, + force_readable=False, force_writable=False): if encoding is None: encoding = get_best_encoding(stream) if errors is None: errors = 'replace' return _NonClosingTextIOWrapper(stream, encoding, errors, - line_buffering=True) + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable) def is_ascii_encoding(encoding): @@ -45,8 +52,10 @@ def get_best_encoding(stream): class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__(self, stream, encoding, errors, **extra): - self._stream = stream = _FixupStream(stream) + def __init__(self, stream, encoding, errors, + force_readable=False, force_writable=False, **extra): + self._stream = stream = _FixupStream(stream, force_readable, + force_writable) io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) # The io module is a place where the Python 3 text behavior @@ -81,10 +90,16 @@ class _FixupStream(object): """The new io interface needs more from streams than streams traditionally implement. As such, this fix-up code is necessary in some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
""" - def __init__(self, stream): + def __init__(self, stream, force_readable=False, force_writable=False): self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable def __getattr__(self, name): return getattr(self._stream, name) @@ -101,6 +116,8 @@ def read1(self, size): return self._stream.read(size) def readable(self): + if self._force_readable: + return True x = getattr(self._stream, 'readable', None) if x is not None: return x() @@ -111,6 +128,8 @@ def readable(self): return True def writable(self): + if self._force_writable: + return True x = getattr(self._stream, 'writable', None) if x is not None: return x() @@ -139,6 +158,7 @@ def seekable(self): bytes = str raw_input = raw_input string_types = (str, unicode) + int_types = (int, long) iteritems = lambda x: x.iteritems() range_type = xrange @@ -165,10 +185,13 @@ def is_bytes(x): # available (which is why we use try-catch instead of the WIN variable # here), such as the Google App Engine development server on Windows. In # those cases there is just nothing we can do. + def set_binary_mode(f): + return f + try: import msvcrt except ImportError: - set_binary_mode = lambda x: x + pass else: def set_binary_mode(f): try: @@ -179,6 +202,21 @@ def set_binary_mode(f): msvcrt.setmode(fileno, os.O_BINARY) return f + try: + import fcntl + except ImportError: + pass + else: + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + flags = fcntl.fcntl(fileno, fcntl.F_GETFL) + fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) + return f + def isidentifier(x): return _identifier_re.search(x) is not None @@ -186,28 +224,35 @@ def get_binary_stdin(): return set_binary_mode(sys.stdin) def get_binary_stdout(): + _wrap_std_stream('stdout') return set_binary_mode(sys.stdout) def get_binary_stderr(): + _wrap_std_stream('stderr') return set_binary_mode(sys.stderr) def get_text_stdin(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stdin, encoding, errors) if rv is not None: return rv - return _make_text_stream(sys.stdin, encoding, errors) + return _make_text_stream(sys.stdin, encoding, errors, + force_readable=True) def get_text_stdout(encoding=None, errors=None): + _wrap_std_stream('stdout') rv = _get_windows_console_stream(sys.stdout, encoding, errors) if rv is not None: return rv - return _make_text_stream(sys.stdout, encoding, errors) + return _make_text_stream(sys.stdout, encoding, errors, + force_writable=True) def get_text_stderr(encoding=None, errors=None): + _wrap_std_stream('stderr') rv = _get_windows_console_stream(sys.stderr, encoding, errors) if rv is not None: return rv - return _make_text_stream(sys.stderr, encoding, errors) + return _make_text_stream(sys.stderr, encoding, errors, + force_writable=True) def filename_to_ui(value): if isinstance(value, bytes): @@ -218,6 +263,7 @@ def filename_to_ui(value): text_type = str raw_input = input string_types = (str,) + int_types = (int,) range_type = range isidentifier = lambda x: x.isidentifier() iteritems = lambda x: iter(x.items()) @@ -298,7 +344,8 @@ def _is_compatible_text_stream(stream, encoding, errors): return False - def _force_correct_text_reader(text_reader, encoding, errors): + def _force_correct_text_reader(text_reader, encoding, errors, + force_readable=False): if _is_binary_reader(text_reader, False): binary_reader = text_reader else: @@ -324,9 +371,11 @@ def _force_correct_text_reader(text_reader, encoding, errors): # we're so fundamentally fucked that nothing can repair it. 
if errors is None: errors = 'replace' - return _make_text_stream(binary_reader, encoding, errors) + return _make_text_stream(binary_reader, encoding, errors, + force_readable=force_readable) - def _force_correct_text_writer(text_writer, encoding, errors): + def _force_correct_text_writer(text_writer, encoding, errors, + force_writable=False): if _is_binary_writer(text_writer, False): binary_writer = text_writer else: @@ -352,7 +401,8 @@ def _force_correct_text_writer(text_writer, encoding, errors): # we're so fundamentally fucked that nothing can repair it. if errors is None: errors = 'replace' - return _make_text_stream(binary_writer, encoding, errors) + return _make_text_stream(binary_writer, encoding, errors, + force_writable=force_writable) def get_binary_stdin(): reader = _find_binary_reader(sys.stdin) @@ -379,19 +429,22 @@ def get_text_stdin(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stdin, encoding, errors) if rv is not None: return rv - return _force_correct_text_reader(sys.stdin, encoding, errors) + return _force_correct_text_reader(sys.stdin, encoding, errors, + force_readable=True) def get_text_stdout(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stdout, encoding, errors) if rv is not None: return rv - return _force_correct_text_writer(sys.stdout, encoding, errors) + return _force_correct_text_writer(sys.stdout, encoding, errors, + force_writable=True) def get_text_stderr(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stderr, encoding, errors) if rv is not None: return rv - return _force_correct_text_writer(sys.stderr, encoding, errors) + return _force_correct_text_writer(sys.stderr, encoding, errors, + force_writable=True) def filename_to_ui(value): if isinstance(value, bytes): @@ -420,7 +473,7 @@ def open_stream(filename, mode='r', encoding=None, errors='strict', # Standard streams first. These are simple because they don't need # special handling for the atomic flag. It's entirely ignored. if filename == '-': - if 'w' in mode: + if any(m in mode for m in ['w', 'a', 'x']): if 'b' in mode: return get_binary_stdout(), False return get_text_stdout(encoding=encoding, errors=errors), False @@ -460,7 +513,7 @@ def open_stream(filename, mode='r', encoding=None, errors='strict', else: f = os.fdopen(fd, mode) - return _AtomicFile(f, tmp_filename, filename), True + return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True # Used in a destructor call, needs extra protection from interpreter cleanup. 
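As an aside on the open_stream change above: the '-' pseudo-file now maps to the standard output stream for 'a' and 'x' modes as well, not just 'w'. A small sketch through the public click.open_file wrapper, assuming a click 7.0 install:

    import click

    # After this patch, append mode on '-' also resolves to stdout instead
    # of opening a real file literally named '-'.
    with click.open_file('-', 'a') as stream:
        click.echo('goes to standard output', file=stream)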
@@ -533,7 +586,7 @@ def should_strip_ansi(stream=None, color=None): # Windows has a smaller terminal DEFAULT_COLUMNS = 79 - from ._winconsole import _get_windows_console_stream + from ._winconsole import _get_windows_console_stream, _wrap_std_stream def _get_argv_encoding(): import locale @@ -595,6 +648,7 @@ def _get_argv_encoding(): return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding() _get_windows_console_stream = lambda *x: None + _wrap_std_stream = lambda *x: None def term_len(x): @@ -620,6 +674,7 @@ def func(): return rv rv = wrapper_func() try: + stream = src_func() # In case wrapper_func() modified the stream cache[stream] = rv except Exception: pass diff --git a/pipenv/vendor/click/_termui_impl.py b/pipenv/vendor/click/_termui_impl.py index 7cfd3d5c4a..00a8e5ef1c 100644 --- a/pipenv/vendor/click/_termui_impl.py +++ b/pipenv/vendor/click/_termui_impl.py @@ -1,20 +1,24 @@ +# -*- coding: utf-8 -*- """ - click._termui_impl - ~~~~~~~~~~~~~~~~~~ +click._termui_impl +~~~~~~~~~~~~~~~~~~ - This module contains implementations for the termui module. To keep the - import time of Click down, some infrequently used functionality is placed - in this module and only imported as needed. +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. - :copyright: (c) 2014 by Armin Ronacher. - :license: BSD, see LICENSE for more details. +:copyright: © 2014 by the Pallets team. +:license: BSD, see LICENSE.rst for more details. """ + import os import sys import time import math +import contextlib from ._compat import _default_text_stdout, range_type, PY2, isatty, \ - open_stream, strip_ansi, term_len, get_best_encoding, WIN + open_stream, strip_ansi, term_len, get_best_encoding, WIN, int_types, \ + CYGWIN from .utils import echo from .exceptions import ClickException @@ -41,7 +45,7 @@ def _length_hint(obj): except TypeError: return None if hint is NotImplemented or \ - not isinstance(hint, (int, long)) or \ + not isinstance(hint, int_types) or \ hint < 0: return None return hint @@ -88,6 +92,7 @@ def __init__(self, iterable, length=None, fill_char='#', empty_char=' ', self.current_item = None self.is_hidden = not isatty(self.file) self._last_line = None + self.short_limit = 0.5 def __enter__(self): self.entered = True @@ -101,10 +106,13 @@ def __iter__(self): if not self.entered: raise RuntimeError('You need to use progress bars in a with block.') self.render_progress() - return self + return self.generator() + + def is_fast(self): + return time.time() - self.start <= self.short_limit def render_finish(self): - if self.is_hidden: + if self.is_hidden or self.is_fast(): return self.file.write(AFTER_BAR) self.file.flush() @@ -129,13 +137,13 @@ def eta(self): def format_eta(self): if self.eta_known: - t = self.eta + 1 + t = int(self.eta) seconds = t % 60 - t /= 60 + t //= 60 minutes = t % 60 - t /= 60 + t //= 60 hours = t % 24 - t /= 24 + t //= 24 if t > 0: days = t return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds) @@ -152,25 +160,27 @@ def format_pos(self): def format_pct(self): return ('% 4d%%' % int(self.pct * 100))[1:] - def format_progress_line(self): - show_percent = self.show_percent - - info_bits = [] + def format_bar(self): if self.length_known: bar_length = int(self.pct * self.width) bar = self.fill_char * bar_length bar += self.empty_char * (self.width - bar_length) - if show_percent is None: - show_percent = not self.show_pos + elif 
self.finished: + bar = self.fill_char * self.width else: - if self.finished: - bar = self.fill_char * self.width - else: - bar = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - bar[int((math.cos(self.pos * self.time_per_iteration) - / 2.0 + 0.5) * self.width)] = self.fill_char - bar = ''.join(bar) + bar = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + bar[int((math.cos(self.pos * self.time_per_iteration) + / 2.0 + 0.5) * self.width)] = self.fill_char + bar = ''.join(bar) + return bar + + def format_progress_line(self): + show_percent = self.show_percent + + info_bits = [] + if self.length_known and show_percent is None: + show_percent = not self.show_pos if self.show_pos: info_bits.append(self.format_pos()) @@ -185,49 +195,47 @@ def format_progress_line(self): return (self.bar_template % { 'label': self.label, - 'bar': bar, + 'bar': self.format_bar(), 'info': self.info_sep.join(info_bits) }).rstrip() def render_progress(self): from .termui import get_terminal_size - nl = False if self.is_hidden: - buf = [self.label] - nl = True - else: - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, get_terminal_size()[0] - clutter_length) - if new_width < old_width: - buf.append(BEFORE_BAR) - buf.append(' ' * self.max_width) - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - buf.append(line) - - buf.append(' ' * (clear_width - line_len)) - line = ''.join(buf) + return + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, get_terminal_size()[0] - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(' ' * self.max_width) + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(' ' * (clear_width - line_len)) + line = ''.join(buf) # Render the line only if it changed. - if line != self._last_line: + + if line != self._last_line and not self.is_fast(): self._last_line = line - echo(line, file=self.file, color=self.color, nl=nl) + echo(line, file=self.file, color=self.color, nl=False) self.file.flush() def make_step(self, n_steps): @@ -239,7 +247,16 @@ def make_step(self, n_steps): return self.last_eta = time.time() - self.avg = self.avg[-6:] + [-(self.start - time.time()) / (self.pos)] + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. 
+ if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] self.eta_known = self.length_known @@ -252,54 +269,56 @@ def finish(self): self.current_item = None self.finished = True - def next(self): + def generator(self): + """ + Returns a generator which yields the items added to the bar during + construction, and updates the progress bar *after* the yielded block + returns. + """ + if not self.entered: + raise RuntimeError('You need to use progress bars in a with block.') + if self.is_hidden: - return next(self.iter) - try: - rv = next(self.iter) - self.current_item = rv - except StopIteration: + for rv in self.iter: + yield rv + else: + for rv in self.iter: + self.current_item = rv + yield rv + self.update(1) self.finish() self.render_progress() - raise StopIteration() - else: - self.update(1) - return rv - if not PY2: - __next__ = next - del next - -def pager(text, color=None): +def pager(generator, color=None): """Decide what method to use for paging through text.""" stdout = _default_text_stdout() if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, text, color) + return _nullpager(stdout, generator, color) pager_cmd = (os.environ.get('PAGER', None) or '').strip() if pager_cmd: if WIN: - return _tempfilepager(text, pager_cmd, color) - return _pipepager(text, pager_cmd, color) + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) if os.environ.get('TERM') in ('dumb', 'emacs'): - return _nullpager(stdout, text, color) + return _nullpager(stdout, generator, color) if WIN or sys.platform.startswith('os2'): - return _tempfilepager(text, 'more <', color) + return _tempfilepager(generator, 'more <', color) if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: - return _pipepager(text, 'less', color) + return _pipepager(generator, 'less', color) import tempfile fd, filename = tempfile.mkstemp() os.close(fd) try: if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0: - return _pipepager(text, 'more', color) - return _nullpager(stdout, text, color) + return _pipepager(generator, 'more', color) + return _nullpager(stdout, generator, color) finally: os.unlink(filename) -def _pipepager(text, cmd, color): +def _pipepager(generator, cmd, color): """Page through text by feeding it to another program. Invoking a pager through this might support colors. """ @@ -317,17 +336,19 @@ def _pipepager(text, cmd, color): elif 'r' in less_flags or 'R' in less_flags: color = True - if not color: - text = strip_ansi(text) - c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) encoding = get_best_encoding(c.stdin) try: - c.stdin.write(text.encode(encoding, 'replace')) - c.stdin.close() + for text in generator: + if not color: + text = strip_ansi(text) + + c.stdin.write(text.encode(encoding, 'replace')) except (IOError, KeyboardInterrupt): pass + else: + c.stdin.close() # Less doesn't respect ^C, but catches it for its own UI purposes (aborting # search or other commands inside less). @@ -346,10 +367,12 @@ def _pipepager(text, cmd, color): break -def _tempfilepager(text, cmd, color): +def _tempfilepager(generator, cmd, color): """Page through text by invoking a program on a temporary file.""" import tempfile filename = tempfile.mktemp() + # TODO: This never terminates if the passed generator never terminates. 
+ text = "".join(generator) if not color: text = strip_ansi(text) encoding = get_best_encoding(sys.stdout) @@ -361,11 +384,12 @@ def _tempfilepager(text, cmd, color): os.unlink(filename) -def _nullpager(stream, text, color): +def _nullpager(stream, generator, color): """Simply print unformatted text. This is the ultimate fallback.""" - if not color: - text = strip_ansi(text) - stream.write(text) + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) class Editor(object): @@ -478,6 +502,14 @@ def _unquote_file(url): args = 'start %s "" "%s"' % ( wait and '/WAIT' or '', url.replace('"', '')) return os.system(args) + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = 'cygstart "%s"' % (os.path.dirname(url).replace('"', '')) + else: + args = 'cygstart %s "%s"' % ( + wait and '-w' or '', url.replace('"', '')) + return os.system(args) try: if locate: @@ -497,32 +529,69 @@ def _unquote_file(url): def _translate_ch_to_exc(ch): - if ch == '\x03': + if ch == u'\x03': raise KeyboardInterrupt() - if ch == '\x04': + if ch == u'\x04' and not WIN: # Unix-like, Ctrl+D + raise EOFError() + if ch == u'\x1a' and WIN: # Windows, Ctrl+Z raise EOFError() if WIN: import msvcrt + @contextlib.contextmanager + def raw_terminal(): + yield + def getchar(echo): - rv = msvcrt.getch() + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. if echo: - msvcrt.putchar(rv) + func = msvcrt.getwche + else: + func = msvcrt.getwch + + rv = func() + if rv in (u'\x00', u'\xe0'): + # \x00 and \xe0 are control characters that indicate special key, + # see above. 
+ rv += func() _translate_ch_to_exc(rv) - if PY2: - enc = getattr(sys.stdin, 'encoding', None) - if enc is not None: - rv = rv.decode(enc, 'replace') - else: - rv = rv.decode('cp1252', 'replace') return rv else: import tty import termios - def getchar(echo): + @contextlib.contextmanager + def raw_terminal(): if not isatty(sys.stdin): f = open('/dev/tty') fd = f.fileno() @@ -533,9 +602,7 @@ def getchar(echo): old_settings = termios.tcgetattr(fd) try: tty.setraw(fd) - ch = os.read(fd, 32) - if echo and isatty(sys.stdout): - sys.stdout.write(ch) + yield fd finally: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) sys.stdout.flush() @@ -543,5 +610,12 @@ def getchar(echo): f.close() except termios.error: pass - _translate_ch_to_exc(ch) - return ch.decode(get_best_encoding(sys.stdin), 'replace') + + def getchar(echo): + with raw_terminal() as fd: + ch = os.read(fd, 32) + ch = ch.decode(get_best_encoding(sys.stdin), 'replace') + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + _translate_ch_to_exc(ch) + return ch diff --git a/pipenv/vendor/click/_unicodefun.py b/pipenv/vendor/click/_unicodefun.py index 9e17a384ef..620edff37e 100644 --- a/pipenv/vendor/click/_unicodefun.py +++ b/pipenv/vendor/click/_unicodefun.py @@ -14,6 +14,8 @@ def _find_unicode_literals_frame(): import __future__ + if not hasattr(sys, '_getframe'): # not all Python implementations have it + return 0 frm = sys._getframe(1) idx = 1 while frm is not None: @@ -41,7 +43,7 @@ def _check_for_unicode_literals(): 'because it can introduce subtle bugs in your ' 'code. You should instead use explicit u"" literals ' 'for your unicode strings. For more information see ' - 'http://click.pocoo.org/python3/'), + 'https://click.palletsprojects.com/python3/'), stacklevel=bad_frame) @@ -60,8 +62,11 @@ def _verify_python3_env(): extra = '' if os.name == 'posix': import subprocess - rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, - stderr=subprocess.PIPE).communicate()[0] + try: + rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate()[0] + except OSError: + rv = b'' good_locales = set() has_c_utf8 = False @@ -94,7 +99,7 @@ def _verify_python3_env(): else: extra += ( 'This system lists a couple of UTF-8 supporting locales that\n' - 'you can pick from. The following suitable locales where\n' + 'you can pick from. The following suitable locales were\n' 'discovered: %s' ) % ', '.join(sorted(good_locales)) @@ -112,7 +117,9 @@ def _verify_python3_env(): 'is not supported' ) % bad_locale - raise RuntimeError('Click will abort further execution because Python 3 ' - 'was configured to use ASCII as encoding for the ' - 'environment. Consult http://click.pocoo.org/python3/' - 'for mitigation steps.' + extra) + raise RuntimeError( + 'Click will abort further execution because Python 3 was' + ' configured to use ASCII as encoding for the environment.' + ' Consult https://click.palletsprojects.com/en/7.x/python3/ for' + ' mitigation steps.' 
+ extra + ) diff --git a/pipenv/vendor/click/_winconsole.py b/pipenv/vendor/click/_winconsole.py index 9aed942162..bbb080ddae 100644 --- a/pipenv/vendor/click/_winconsole.py +++ b/pipenv/vendor/click/_winconsole.py @@ -15,7 +15,7 @@ import time import ctypes import msvcrt -from click._compat import _NonClosingTextIOWrapper, text_type, PY2 +from ._compat import _NonClosingTextIOWrapper, text_type, PY2 from ctypes import byref, POINTER, c_int, c_char, c_char_p, \ c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE try: @@ -201,6 +201,40 @@ def __repr__(self): ) +class WindowsChunkedWriter(object): + """ + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()' which we wrap to write in + limited chunks due to a Windows limitation on binary console streams. + """ + def __init__(self, wrapped): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. + self.__wrapped = wrapped + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + total_to_write = len(text) + written = 0 + + while written < total_to_write: + to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) + self.__wrapped.write(text[written:written+to_write]) + written += to_write + + +_wrapped_std_streams = set() + + +def _wrap_std_stream(name): + # Python 2 & Windows 7 and below + if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams: + setattr(sys, name, WindowsChunkedWriter(getattr(sys, name))) + _wrapped_std_streams.add(name) + + def _get_text_stdin(buffer_stream): text_stream = _NonClosingTextIOWrapper( io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), @@ -210,14 +244,14 @@ def _get_text_stdin(buffer_stream): def _get_text_stdout(buffer_stream): text_stream = _NonClosingTextIOWrapper( - _WindowsConsoleWriter(STDOUT_HANDLE), + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), 'utf-16-le', 'strict', line_buffering=True) return ConsoleStream(text_stream, buffer_stream) def _get_text_stderr(buffer_stream): text_stream = _NonClosingTextIOWrapper( - _WindowsConsoleWriter(STDERR_HANDLE), + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), 'utf-16-le', 'strict', line_buffering=True) return ConsoleStream(text_stream, buffer_stream) @@ -261,7 +295,7 @@ def _get_windows_console_stream(f, encoding, errors): func = _stream_factories.get(f.fileno()) if func is not None: if not PY2: - f = getattr(f, 'buffer') + f = getattr(f, 'buffer', None) if f is None: return None else: diff --git a/pipenv/vendor/click/core.py b/pipenv/vendor/click/core.py index 7456451475..7a1e3422be 100644 --- a/pipenv/vendor/click/core.py +++ b/pipenv/vendor/click/core.py @@ -1,4 +1,5 @@ import errno +import inspect import os import sys from contextlib import contextmanager @@ -6,15 +7,16 @@ from functools import update_wrapper from .types import convert_type, IntRange, BOOL -from .utils import make_str, make_default_short_help, echo, get_os_args +from .utils import PacifyFlushWrapper, make_str, make_default_short_help, \ + echo, get_os_args from .exceptions import ClickException, UsageError, BadParameter, Abort, \ - MissingParameter -from .termui import prompt, confirm + MissingParameter, Exit +from .termui import prompt, confirm, style from .formatting import HelpFormatter, join_options from .parser import OptionParser, split_opt from .globals import push_context, pop_context -from ._compat import PY2, isidentifier, iteritems +from ._compat import 
PY2, isidentifier, iteritems, string_types from ._unicodefun import _check_for_unicode_literals, _verify_python3_env @@ -24,6 +26,24 @@ SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...' SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...' +DEPRECATED_HELP_NOTICE = ' (DEPRECATED)' +DEPRECATED_INVOKE_NOTICE = 'DeprecationWarning: ' + \ + 'The command %(name)s is deprecated.' + + +def _maybe_show_deprecated_notice(cmd): + if cmd.deprecated: + echo(style(DEPRECATED_INVOKE_NOTICE % {'name': cmd.name}, fg='red'), err=True) + + +def fast_exit(code): + """Exit without garbage collection, this speeds up exit by about 10ms for + things like bash completion. + """ + sys.stdout.flush() + sys.stderr.flush() + os._exit(code) + def _bashcomplete(cmd, prog_name, complete_var=None): """Internal handler for the bash completion support.""" @@ -35,7 +55,7 @@ def _bashcomplete(cmd, prog_name, complete_var=None): from ._bashcomplete import bashcomplete if bashcomplete(cmd, prog_name, complete_var, complete_instr): - sys.exit(1) + fast_exit(1) def _check_multicommand(base_command, cmd_name, cmd, register=False): @@ -50,9 +70,7 @@ def _check_multicommand(base_command, cmd_name, cmd, register=False): raise RuntimeError('%s. Command "%s" is set to chain and "%s" was ' 'added as subcommand but it in itself is a ' 'multi command. ("%s" is a %s within a chained ' - '%s named "%s"). This restriction was supposed to ' - 'be lifted in 6.0 but the fix was flawed. This ' - 'will be fixed in Click 7.0' % ( + '%s named "%s").' % ( hint, base_command.name, cmd_name, cmd_name, cmd.__class__.__name__, base_command.__class__.__name__, @@ -165,7 +183,8 @@ class Context(object): add some safety mapping on the right. :param resilient_parsing: if this flag is enabled then Click will parse without any interactivity or callback - invocation. This is useful for implementing + invocation. Default values will also be + ignored. This is useful for implementing things such as completion support. :param allow_extra_args: if this is set to `True` then extra arguments at the end will not raise an error and will be @@ -295,7 +314,8 @@ def __init__(self, command, parent=None, info_name=None, obj=None, self.token_normalize_func = token_normalize_func #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures. + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. self.resilient_parsing = resilient_parsing # If there is no envvar prefix yet, but the parent has one and @@ -308,7 +328,7 @@ def __init__(self, command, parent=None, info_name=None, obj=None, auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix, self.info_name.upper()) else: - self.auto_envvar_prefix = auto_envvar_prefix.upper() + auto_envvar_prefix = auto_envvar_prefix.upper() self.auto_envvar_prefix = auto_envvar_prefix if color is None and parent is not None: @@ -372,7 +392,7 @@ def scope(self, cleanup=True): @property def meta(self): """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utiltiies can store some + that are nested. It exists so that click utilities can store some state here if they need to. It is however the responsibility of that code to manage this dictionary well. 
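
The hunk above touches the `Context.meta` docstring: a single dictionary shared by a context and every context nested under it, intended as scratch space for Click utilities. A minimal sketch of that usage pattern follows (the `--lang` option and the `LANG_KEY` name are illustrative only, not part of this patch):

    import click

    LANG_KEY = __name__ + '.lang'  # namespaced key, so unrelated utilities don't collide

    @click.group()
    @click.option('--lang', default='en')
    @click.pass_context
    def cli(ctx, lang):
        # Anything stored on ctx.meta is visible to every nested context,
        # so subcommands can read it without it being threaded through
        # each function signature.
        ctx.meta[LANG_KEY] = lang

    @cli.command()
    def greet():
        lang = click.get_current_context().meta.get(LANG_KEY, 'en')
        click.echo('hola' if lang == 'es' else 'hello')

Because `meta` is plain shared mutable state, the convention is to prefix keys with the defining module's `__name__` so different utilities cannot trample each other.
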
@@ -481,7 +501,7 @@ def abort(self):
 
     def exit(self, code=0):
         """Exits the application with a given exit code."""
-        sys.exit(code)
+        raise Exit(code)
 
     def get_usage(self):
         """Helper method to get formatted usage string for the current
@@ -655,7 +675,7 @@ def main(self, args=None, prog_name=None, complete_var=None,
                             name from ``sys.argv[0]``.
         :param complete_var: the environment variable that controls the
                              bash completion support.  The default is
-                             ``"_<prog_name>_COMPLETE"`` with prog name in
+                             ``"_<prog_name>_COMPLETE"`` with prog_name in
                              uppercase.
         :param standalone_mode: the default behavior is to invoke the
                                 script in standalone mode.  Click will then
@@ -670,7 +690,7 @@ def main(self, args=None, prog_name=None, complete_var=None,
                       constructor.  See :class:`Context` for more information.
         """
         # If we are in Python 3, we will verify that the environment is
-        # sane at this point of reject further execution to avoid a
+        # sane at this point or reject further execution to avoid a
         # broken script.
         if not PY2:
             _verify_python3_env()
@@ -697,6 +717,13 @@ def main(self, args=None, prog_name=None, complete_var=None,
                 rv = self.invoke(ctx)
                 if not standalone_mode:
                     return rv
+                # it's not safe to `ctx.exit(rv)` here!
+                # note that `rv` may actually contain data like "1" which
+                # has obvious effects
+                # more subtle case: `rv=[None, None]` can come out of
+                # chained commands which all returned `None` -- so it's not
+                # even always obvious that `rv` indicates success/failure
+                # by its truthiness/falsiness
                 ctx.exit()
         except (EOFError, KeyboardInterrupt):
             echo(file=sys.stderr)
@@ -708,9 +735,24 @@ def main(self, args=None, prog_name=None, complete_var=None,
                 sys.exit(e.exit_code)
         except IOError as e:
             if e.errno == errno.EPIPE:
+                sys.stdout = PacifyFlushWrapper(sys.stdout)
+                sys.stderr = PacifyFlushWrapper(sys.stderr)
                 sys.exit(1)
             else:
                 raise
+        except Exit as e:
+            if standalone_mode:
+                sys.exit(e.exit_code)
+            else:
+                # in non-standalone mode, return the exit code
+                # note that this is only reached if `self.invoke` above raises
+                # an Exit explicitly -- thus bypassing the check there which
+                # would return its result
+                # the results of non-standalone execution may therefore be
+                # somewhat ambiguous: if there are codepaths which lead to
+                # `ctx.exit(1)` and to `return 1`, the caller won't be able to
+                # tell the difference between the two
+                return e.exit_code
         except Abort:
             if not standalone_mode:
                 raise
@@ -743,11 +785,16 @@ class Command(BaseCommand):
                        shown on the command listing of the parent command.
     :param add_help_option: by default each command registers a ``--help``
                             option.  This can be disabled by this parameter.
+    :param hidden: hide this command from help outputs.
+
+    :param deprecated: issues a message indicating that
+                       the command is deprecated.
     """
 
     def __init__(self, name, context_settings=None, callback=None,
                  params=None, help=None, epilog=None, short_help=None,
-                 options_metavar='[OPTIONS]', add_help_option=True):
+                 options_metavar='[OPTIONS]', add_help_option=True,
+                 hidden=False, deprecated=False):
         BaseCommand.__init__(self, name, context_settings)
         #: the callback to execute when the command fires.  This might be
         #: `None` in which case nothing happens.
@@ -756,13 +803,17 @@ def __init__(self, name, context_settings=None, callback=None,
         #: should show up in the help page and execute.  Eager parameters
         #: will automatically be handled before non eager ones.
self.params = params or [] + # if a form feed (page break) is found in the help text, truncate help + # text to the content preceding the first form feed + if help and '\f' in help: + help = help.split('\f', 1)[0] self.help = help self.epilog = epilog self.options_metavar = options_metavar - if short_help is None and help: - short_help = make_default_short_help(help) self.short_help = short_help self.add_help_option = add_help_option + self.hidden = hidden + self.deprecated = deprecated def get_usage(self, ctx): formatter = ctx.make_formatter() @@ -816,8 +867,6 @@ def show_help(ctx, param, value): def make_parser(self, ctx): """Creates the underlying option parser for this command.""" parser = OptionParser(ctx) - parser.allow_interspersed_args = ctx.allow_interspersed_args - parser.ignore_unknown_options = ctx.ignore_unknown_options for param in self.get_params(ctx): param.add_to_parser(parser, ctx) return parser @@ -830,6 +879,10 @@ def get_help(self, ctx): self.format_help(ctx, formatter) return formatter.getvalue().rstrip('\n') + def get_short_help_str(self, limit=45): + """Gets short help for the command or makes it by shortening the long help string.""" + return self.short_help or self.help and make_default_short_help(self.help, limit) or '' + def format_help(self, ctx, formatter): """Writes the help into the formatter if it exists. @@ -850,7 +903,14 @@ def format_help_text(self, ctx, formatter): if self.help: formatter.write_paragraph() with formatter.indentation(): - formatter.write_text(self.help) + help_text = self.help + if self.deprecated: + help_text += DEPRECATED_HELP_NOTICE + formatter.write_text(help_text) + elif self.deprecated: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(DEPRECATED_HELP_NOTICE) def format_options(self, ctx, formatter): """Writes all the options into the formatter if they exist.""" @@ -891,6 +951,7 @@ def invoke(self, ctx): """Given a context, this invokes the attached callback (if it exists) in the right way. """ + _maybe_show_deprecated_notice(self) if self.callback is not None: return ctx.invoke(self.callback, **ctx.params) @@ -996,19 +1057,29 @@ def format_commands(self, ctx, formatter): """Extra format methods for multi methods that adds all the commands after the options. """ - rows = [] + commands = [] for subcommand in self.list_commands(ctx): cmd = self.get_command(ctx, subcommand) # What is this, the tool lied about a command. Ignore it if cmd is None: continue + if cmd.hidden: + continue - help = cmd.short_help or '' - rows.append((subcommand, help)) + commands.append((subcommand, cmd)) - if rows: - with formatter.section('Commands'): - formatter.write_dl(rows) + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section('Commands'): + formatter.write_dl(rows) def parse_args(self, ctx, args): if not args and self.no_args_is_help and not ctx.resilient_parsing: @@ -1111,7 +1182,7 @@ def resolve_command(self, ctx, args): # an option we want to kick off parsing again for arguments to # resolve things like --help which now should go to the main # place. - if cmd is None: + if cmd is None and not ctx.resilient_parsing: if split_opt(cmd_name)[0]: self.parse_args(ctx, ctx.args) ctx.fail('No such command "%s".' 
% original_cmd_name) @@ -1216,7 +1287,7 @@ def list_commands(self, ctx): class Parameter(object): - """A parameter to a command comes in two versions: they are either + r"""A parameter to a command comes in two versions: they are either :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently not supported by design as some of the internals for parsing are intentionally not finalized. @@ -1261,7 +1332,8 @@ class Parameter(object): def __init__(self, param_decls=None, type=None, required=False, default=None, callback=None, nargs=None, metavar=None, - expose_value=True, is_eager=False, envvar=None): + expose_value=True, is_eager=False, envvar=None, + autocompletion=None): self.name, self.opts, self.secondary_opts = \ self._parse_decls(param_decls or (), expose_value) @@ -1284,6 +1356,7 @@ def __init__(self, param_decls=None, type=None, required=False, self.is_eager = is_eager self.metavar = metavar self.envvar = envvar + self.autocompletion = autocompletion @property def human_readable_name(self): @@ -1316,10 +1389,10 @@ def add_to_parser(self, parser, ctx): def consume_value(self, ctx, opts): value = opts.get(self.name) - if value is None: - value = ctx.lookup_default(self.name) if value is None: value = self.value_from_envvar(ctx) + if value is None: + value = ctx.lookup_default(self.name) return value def type_cast_value(self, ctx, value): @@ -1364,7 +1437,7 @@ def value_is_missing(self, value): def full_process_value(self, ctx, value): value = self.process_value(ctx, value) - if value is None: + if value is None and not ctx.resilient_parsing: value = self.get_default(ctx) if self.required and self.value_is_missing(value): @@ -1416,6 +1489,13 @@ def get_help_record(self, ctx): def get_usage_pieces(self, ctx): return [] + def get_error_hint(self, ctx): + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return ' / '.join('"%s"' % x for x in hint_list) + class Option(Parameter): """Options are usually optional values on the command line and @@ -1424,10 +1504,15 @@ class Option(Parameter): All other parameters are passed onwards to the parameter constructor. :param show_default: controls if the default value should be shown on the - help page. Normally, defaults are not shown. - :param prompt: if set to `True` or a non empty string then the user will - be prompted for input if not set. If set to `True` the - prompt will be the option name capitalized. + help page. Normally, defaults are not shown. If this + value is a string, it shows the string instead of the + value. This is particularly useful for dynamic options. + :param show_envvar: controls if an environment variable should be shown on + the help page. Normally, environment variables + are not shown. + :param prompt: if set to `True` or a non empty string then the user will be + prompted for input. If set to `True` the prompt will be the + option name capitalized. :param confirmation_prompt: if set then the value will need to be confirmed if it was prompted for. :param hide_input: if this is `True` then the input on the prompt will be @@ -1448,6 +1533,7 @@ class Option(Parameter): variable in case a prefix is defined on the context. :param help: the help string. + :param hidden: hide this option from help outputs. 
""" param_type_name = 'option' @@ -1455,7 +1541,8 @@ def __init__(self, param_decls=None, show_default=False, prompt=False, confirmation_prompt=False, hide_input=False, is_flag=None, flag_value=None, multiple=False, count=False, allow_from_autoenv=True, - type=None, help=None, **attrs): + type=None, help=None, hidden=False, show_choices=True, + show_envvar=False, **attrs): default_is_missing = attrs.get('default', _missing) is _missing Parameter.__init__(self, param_decls, type=type, **attrs) @@ -1468,6 +1555,7 @@ def __init__(self, param_decls=None, show_default=False, self.prompt = prompt_text self.confirmation_prompt = confirmation_prompt self.hide_input = hide_input + self.hidden = hidden # Flags if is_flag is None: @@ -1500,6 +1588,8 @@ def __init__(self, param_decls=None, show_default=False, self.allow_from_autoenv = allow_from_autoenv self.help = help self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar # Sanity check for stuff we don't support if __debug__: @@ -1548,8 +1638,8 @@ def _parse_decls(self, decls, expose_value): opts.append(decl) if name is None and possible_names: - possible_names.sort(key=lambda x: len(x[0])) - name = possible_names[-1][1].replace('-', '_').lower() + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace('-', '_').lower() if not isidentifier(name): name = None @@ -1595,6 +1685,8 @@ def add_to_parser(self, parser, ctx): parser.add_option(self.opts, **kwargs) def get_help_record(self, ctx): + if self.hidden: + return any_prefix_is_slash = [] def _write_opts(opts): @@ -1611,11 +1703,28 @@ def _write_opts(opts): help = self.help or '' extra = [] + if self.show_envvar: + envvar = self.envvar + if envvar is None: + if self.allow_from_autoenv and \ + ctx.auto_envvar_prefix is not None: + envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) + if envvar is not None: + extra.append('env var: %s' % ( + ', '.join('%s' % d for d in envvar) + if isinstance(envvar, (list, tuple)) + else envvar, )) if self.default is not None and self.show_default: - extra.append('default: %s' % ( - ', '.join('%s' % d for d in self.default) - if isinstance(self.default, (list, tuple)) - else self.default, )) + if isinstance(self.show_default, string_types): + default_string = '({})'.format(self.show_default) + elif isinstance(self.default, (list, tuple)): + default_string = ', '.join('%s' % d for d in self.default) + elif inspect.isfunction(self.default): + default_string = "(dynamic)" + else: + default_string = self.default + extra.append('default: {}'.format(default_string)) + if self.required: extra.append('required') if extra: @@ -1649,8 +1758,8 @@ def prompt_for_value(self, ctx): if self.is_bool_flag: return confirm(self.prompt, default) - return prompt(self.prompt, default=default, - hide_input=self.hide_input, + return prompt(self.prompt, default=default, type=self.type, + hide_input=self.hide_input, show_choices=self.show_choices, confirmation_prompt=self.confirmation_prompt, value_proc=lambda x: self.process_value(ctx, x)) @@ -1710,7 +1819,9 @@ def human_readable_name(self): def make_metavar(self): if self.metavar is not None: return self.metavar - var = self.name.upper() + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() if not self.required: var = '[%s]' % var if self.nargs != 1: @@ -1725,16 +1836,17 @@ def _parse_decls(self, decls, expose_value): if len(decls) == 1: name = arg = decls[0] name = name.replace('-', '_').lower() - elif 
len(decls) == 2: - name, arg = decls else: - raise TypeError('Arguments take exactly one or two ' - 'parameter declarations, got %d' % len(decls)) + raise TypeError('Arguments take exactly one ' + 'parameter declaration, got %d' % len(decls)) return name, [arg], [] def get_usage_pieces(self, ctx): return [self.make_metavar()] + def get_error_hint(self, ctx): + return '"%s"' % self.make_metavar() + def add_to_parser(self, parser, ctx): parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/pipenv/vendor/click/decorators.py b/pipenv/vendor/click/decorators.py index 9893452650..c57c530861 100644 --- a/pipenv/vendor/click/decorators.py +++ b/pipenv/vendor/click/decorators.py @@ -61,7 +61,7 @@ def new_func(*args, **kwargs): raise RuntimeError('Managed to invoke callback without a ' 'context object of type %r existing' % object_type.__name__) - return ctx.invoke(f, obj, *args[1:], **kwargs) + return ctx.invoke(f, obj, *args, **kwargs) return update_wrapper(new_func, f) return decorator @@ -85,12 +85,12 @@ def _make_command(f, name, attrs, cls): help = inspect.cleandoc(help) attrs['help'] = help _check_for_unicode_literals() - return cls(name=name or f.__name__.lower(), + return cls(name=name or f.__name__.lower().replace('_', '-'), callback=f, params=params, **attrs) def command(name=None, cls=None, **attrs): - """Creates a new :class:`Command` and uses the decorated function as + r"""Creates a new :class:`Command` and uses the decorated function as callback. This will also automatically attach all decorated :func:`option`\s and :func:`argument`\s as parameters to the command. @@ -105,7 +105,7 @@ def command(name=None, cls=None, **attrs): command :class:`Group`. :param name: the name of the command. This defaults to the function - name. + name with underscores replaced by dashes. :param cls: the command class to instantiate. This defaults to :class:`Command`. """ @@ -164,10 +164,13 @@ def option(*param_decls, **attrs): :class:`Option`. """ def decorator(f): - if 'help' in attrs: - attrs['help'] = inspect.cleandoc(attrs['help']) - OptionClass = attrs.pop('cls', Option) - _param_memo(f, OptionClass(param_decls, **attrs)) + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + + if 'help' in option_attrs: + option_attrs['help'] = inspect.cleandoc(option_attrs['help']) + OptionClass = option_attrs.pop('cls', Option) + _param_memo(f, OptionClass(param_decls, **option_attrs)) return f return decorator @@ -235,7 +238,11 @@ def version_option(version=None, *param_decls, **attrs): :param others: everything else is forwarded to :func:`option`. 
""" if version is None: - module = sys._getframe(1).f_globals.get('__name__') + if hasattr(sys, '_getframe'): + module = sys._getframe(1).f_globals.get('__name__') + else: + module = '' + def decorator(f): prog_name = attrs.pop('prog_name', None) message = attrs.pop('message', '%(prog)s, version %(version)s') diff --git a/pipenv/vendor/click/exceptions.py b/pipenv/vendor/click/exceptions.py index 74a4542bb5..6fa17658cb 100644 --- a/pipenv/vendor/click/exceptions.py +++ b/pipenv/vendor/click/exceptions.py @@ -2,6 +2,12 @@ from .utils import echo +def _join_param_hints(param_hint): + if isinstance(param_hint, (tuple, list)): + return ' / '.join('"%s"' % x for x in param_hint) + return param_hint + + class ClickException(Exception): """An exception that Click can handle and show to the user.""" @@ -9,15 +15,25 @@ class ClickException(Exception): exit_code = 1 def __init__(self, message): + ctor_msg = message if PY2: - if message is not None: - message = message.encode('utf-8') - Exception.__init__(self, message) + if ctor_msg is not None: + ctor_msg = ctor_msg.encode('utf-8') + Exception.__init__(self, ctor_msg) self.message = message def format_message(self): return self.message + def __str__(self): + return self.message + + if PY2: + __unicode__ = __str__ + + def __str__(self): + return self.message.encode('utf-8') + def show(self, file=None): if file is None: file = get_text_stderr() @@ -37,14 +53,20 @@ class UsageError(ClickException): def __init__(self, message, ctx=None): ClickException.__init__(self, message) self.ctx = ctx + self.cmd = self.ctx and self.ctx.command or None def show(self, file=None): if file is None: file = get_text_stderr() color = None + hint = '' + if (self.cmd is not None and + self.cmd.get_help_option(self.ctx) is not None): + hint = ('Try "%s %s" for help.\n' + % (self.ctx.command_path, self.ctx.help_option_names[0])) if self.ctx is not None: color = self.ctx.color - echo(self.ctx.get_usage() + '\n', file=file, color=color) + echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color) echo('Error: %s' % self.format_message(), file=file, color=color) @@ -76,11 +98,11 @@ def format_message(self): if self.param_hint is not None: param_hint = self.param_hint elif self.param is not None: - param_hint = self.param.opts or [self.param.human_readable_name] + param_hint = self.param.get_error_hint(self.ctx) else: return 'Invalid value: %s' % self.message - if isinstance(param_hint, (tuple, list)): - param_hint = ' / '.join('"%s"' % x for x in param_hint) + param_hint = _join_param_hints(param_hint) + return 'Invalid value for %s: %s' % (param_hint, self.message) @@ -105,11 +127,10 @@ def format_message(self): if self.param_hint is not None: param_hint = self.param_hint elif self.param is not None: - param_hint = self.param.opts or [self.param.human_readable_name] + param_hint = self.param.get_error_hint(self.ctx) else: param_hint = None - if isinstance(param_hint, (tuple, list)): - param_hint = ' / '.join('"%s"' % x for x in param_hint) + param_hint = _join_param_hints(param_hint) param_type = self.param_type if param_type is None and self.param is not None: @@ -164,10 +185,13 @@ class BadOptionUsage(UsageError): for an option is not correct. .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. 
""" - def __init__(self, message, ctx=None): + def __init__(self, option_name, message, ctx=None): UsageError.__init__(self, message, ctx) + self.option_name = option_name class BadArgumentUsage(UsageError): @@ -199,3 +223,13 @@ def format_message(self): class Abort(RuntimeError): """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + def __init__(self, code=0): + self.exit_code = code diff --git a/pipenv/vendor/click/globals.py b/pipenv/vendor/click/globals.py index 14338e6bb8..843b594abe 100644 --- a/pipenv/vendor/click/globals.py +++ b/pipenv/vendor/click/globals.py @@ -9,7 +9,7 @@ def get_current_context(silent=False): access the current context object from anywhere. This is a more implicit alternative to the :func:`pass_context` decorator. This function is primarily useful for helpers such as :func:`echo` which might be - interested in changing it's behavior based on the current context. + interested in changing its behavior based on the current context. To push the current context, :meth:`Context.scope` can be used. diff --git a/pipenv/vendor/click/parser.py b/pipenv/vendor/click/parser.py index 9775c9ff9b..1c3ae9c8ef 100644 --- a/pipenv/vendor/click/parser.py +++ b/pipenv/vendor/click/parser.py @@ -1,20 +1,21 @@ # -*- coding: utf-8 -*- """ - click.parser - ~~~~~~~~~~~~ +click.parser +~~~~~~~~~~~~ - This module started out as largely a copy paste from the stdlib's - optparse module with the features removed that we do not need from - optparse because we implement them in Click on a higher level (for - instance type handling, help formatting and a lot more). +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). - The plan is to remove more and more from here over time. +The plan is to remove more and more from here over time. - The reason this is a different module and not optparse from the stdlib - is that there are differences in 2.x and 3.x about the error messages - generated and optparse in the stdlib uses gettext for no good reason - and might cause us issues. +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. 
""" + import re from collections import deque from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \ @@ -74,8 +75,8 @@ def _fetch(c): def _error_opt_args(nargs, opt): if nargs == 1: - raise BadOptionUsage('%s option requires an argument' % opt) - raise BadOptionUsage('%s option requires %d arguments' % (opt, nargs)) + raise BadOptionUsage(opt, '%s option requires an argument' % opt) + raise BadOptionUsage(opt, '%s option requires %d arguments' % (opt, nargs)) def split_opt(opt): @@ -321,7 +322,7 @@ def _match_long_opt(self, opt, explicit_value, state): if opt not in self._long_opt: possibilities = [word for word in self._long_opt if word.startswith(opt)] - raise NoSuchOption(opt, possibilities=possibilities) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) option = self._long_opt[opt] if option.takes_value: @@ -342,7 +343,7 @@ def _match_long_opt(self, opt, explicit_value, state): del state.rargs[:nargs] elif explicit_value is not None: - raise BadOptionUsage('%s option does not take a value' % opt) + raise BadOptionUsage(opt, '%s option does not take a value' % opt) else: value = None @@ -364,7 +365,7 @@ def _match_short_opt(self, arg, state): if self.ignore_unknown_options: unknown_options.append(ch) continue - raise NoSuchOption(opt) + raise NoSuchOption(opt, ctx=self.ctx) if option.takes_value: # Any characters left in arg? Pretend they're the # next arg, and stop consuming characters of arg. diff --git a/pipenv/vendor/click/termui.py b/pipenv/vendor/click/termui.py index d9fba52325..bf9a3aa163 100644 --- a/pipenv/vendor/click/termui.py +++ b/pipenv/vendor/click/termui.py @@ -1,12 +1,14 @@ import os import sys import struct +import inspect +import itertools from ._compat import raw_input, text_type, string_types, \ isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN from .utils import echo from .exceptions import Abort, UsageError -from .types import convert_type +from .types import convert_type, Choice, Path from .globals import resolve_color_default @@ -14,8 +16,25 @@ # functions to customize how they work. visible_prompt_func = raw_input -_ansi_colors = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', - 'cyan', 'white', 'reset') +_ansi_colors = { + 'black': 30, + 'red': 31, + 'green': 32, + 'yellow': 33, + 'blue': 34, + 'magenta': 35, + 'cyan': 36, + 'white': 37, + 'reset': 39, + 'bright_black': 90, + 'bright_red': 91, + 'bright_green': 92, + 'bright_yellow': 93, + 'bright_blue': 94, + 'bright_magenta': 95, + 'bright_cyan': 96, + 'bright_white': 97, +} _ansi_reset_all = '\033[0m' @@ -24,23 +43,27 @@ def hidden_prompt_func(prompt): return getpass.getpass(prompt) -def _build_prompt(text, suffix, show_default=False, default=None): +def _build_prompt(text, suffix, show_default=False, default=None, show_choices=True, type=None): prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += ' (' + ", ".join(map(str, type.choices)) + ')' if default is not None and show_default: prompt = '%s [%s]' % (prompt, default) return prompt + suffix -def prompt(text, default=None, hide_input=False, - confirmation_prompt=False, type=None, - value_proc=None, prompt_suffix=': ', - show_default=True, err=False): +def prompt(text, default=None, hide_input=False, confirmation_prompt=False, + type=None, value_proc=None, prompt_suffix=': ', show_default=True, + err=False, show_choices=True): """Prompts a user for input. This is a convenience function that can be used to prompt a user for input later. 
If the user aborts the input by sending a interrupt signal, this function will catch it and raise a :exc:`Abort` exception. + .. versionadded:: 7.0 + Added the show_choices parameter. + .. versionadded:: 6.0 Added unicode support for cmd.exe on Windows. @@ -61,6 +84,10 @@ def prompt(text, default=None, hide_input=False, :param show_default: shows or hides the default value in the prompt. :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". """ result = None @@ -82,17 +109,18 @@ def prompt_func(text): if value_proc is None: value_proc = convert_type(type, default) - prompt = _build_prompt(text, prompt_suffix, show_default, default) + prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type) while 1: while 1: value = prompt_func(prompt) if value: break - # If a default is set and used, then the confirmation - # prompt is always skipped because that's the only thing - # that really makes sense. elif default is not None: + if isinstance(value_proc, Path): + # validate Path default value(exists, dir_okay etc.) + value = default + break return default try: result = value_proc(value) @@ -166,8 +194,14 @@ def get_terminal_size(): sz = shutil_get_terminal_size() return sz.columns, sz.lines + # We provide a sensible default for get_winterm_size() when being invoked + # inside a subprocess. Without this, it would not provide a useful input. if get_winterm_size is not None: - return get_winterm_size() + size = get_winterm_size() + if size == (0, 0): + return (79, 24) + else: + return size def ioctl_gwinsz(fd): try: @@ -195,22 +229,33 @@ def ioctl_gwinsz(fd): return int(cr[1]), int(cr[0]) -def echo_via_pager(text, color=None): +def echo_via_pager(text_or_generator, color=None): """This function takes a text and shows it via an environment specific pager on stdout. .. versionchanged:: 3.0 Added the `color` flag. - :param text: the text to page. + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. :param color: controls if the pager supports ANSI colors or not. The default is autodetection. """ color = resolve_color_default(color) - if not isinstance(text, string_types): - text = text_type(text) + + if inspect.isgeneratorfunction(text_or_generator): + i = text_or_generator() + elif isinstance(text_or_generator, string_types): + i = [text_or_generator] + else: + i = iter(text_or_generator) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, string_types) else text_type(el) + for el in i) + from ._termui_impl import pager - return pager(text + '\n', color) + return pager(itertools.chain(text_generator, "\n"), color) def progressbar(iterable=None, length=None, label=None, show_eta=True, @@ -347,10 +392,21 @@ def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, * ``magenta`` * ``cyan`` * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` * ``reset`` (reset the color code only) .. versionadded:: 2.0 + .. versionadded:: 7.0 + Added support for bright colors. + :param text: the string to style with ansi codes. 
:param fg: if provided this will become the foreground color. :param bg: if provided this will become the background color. @@ -369,13 +425,13 @@ def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, bits = [] if fg: try: - bits.append('\033[%dm' % (_ansi_colors.index(fg) + 30)) - except ValueError: + bits.append('\033[%dm' % (_ansi_colors[fg])) + except KeyError: raise TypeError('Unknown color %r' % fg) if bg: try: - bits.append('\033[%dm' % (_ansi_colors.index(bg) + 40)) - except ValueError: + bits.append('\033[%dm' % (_ansi_colors[bg] + 10)) + except KeyError: raise TypeError('Unknown color %r' % bg) if bold is not None: bits.append('\033[%dm' % (1 if bold else 22)) @@ -405,7 +461,7 @@ def unstyle(text): return strip_ansi(text) -def secho(text, file=None, nl=True, err=False, color=None, **styles): +def secho(message=None, file=None, nl=True, err=False, color=None, **styles): """This function combines :func:`echo` and :func:`style` into one call. As such the following two calls are the same:: @@ -417,7 +473,9 @@ def secho(text, file=None, nl=True, err=False, color=None, **styles): .. versionadded:: 2.0 """ - return echo(style(text, **styles), file=file, nl=nl, err=err, color=color) + if message is not None: + message = style(message, **styles) + return echo(message, file=file, nl=nl, err=err, color=color) def edit(text=None, editor=None, env=None, require_save=True, @@ -466,7 +524,7 @@ def launch(url, wait=False, locate=False): Examples:: - click.launch('http://click.pocoo.org/') + click.launch('https://click.palletsprojects.com/') click.launch('/my/downloaded/file', locate=True) .. versionadded:: 2.0 @@ -499,6 +557,10 @@ def getchar(echo=False): Note that this will always read from the terminal, even if something is piped into the standard input. + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + .. versionadded:: 2.0 :param echo: if set to `True`, the character read will also show up on @@ -510,6 +572,11 @@ def getchar(echo=False): return f(echo) +def raw_terminal(): + from ._termui_impl import raw_terminal as f + return f() + + def pause(info='Press any key to continue ...', err=False): """This command stops execution and waits for the user to press any key to continue. This is similar to the Windows batch "pause" diff --git a/pipenv/vendor/click/testing.py b/pipenv/vendor/click/testing.py index 4416c77413..1b2924e0b1 100644 --- a/pipenv/vendor/click/testing.py +++ b/pipenv/vendor/click/testing.py @@ -3,8 +3,9 @@ import shutil import tempfile import contextlib +import shlex -from ._compat import iteritems, PY2 +from ._compat import iteritems, PY2, string_types # If someone wants to vendor click, we want to ensure the @@ -72,27 +73,44 @@ def make_input_stream(input, charset): class Result(object): """Holds the captured result of an invoked CLI script.""" - def __init__(self, runner, output_bytes, exit_code, exception, - exc_info=None): + def __init__(self, runner, stdout_bytes, stderr_bytes, exit_code, + exception, exc_info=None): #: The runner that created the result self.runner = runner - #: The output as bytes. - self.output_bytes = output_bytes + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or False(y) if not available + self.stderr_bytes = stderr_bytes #: The exit code as integer. 
         self.exit_code = exit_code
-        #: The exception that happend if one did.
+        #: The exception that happened if one did.
         self.exception = exception
         #: The traceback
         self.exc_info = exc_info
 
     @property
     def output(self):
-        """The output as unicode string."""
-        return self.output_bytes.decode(self.runner.charset, 'replace') \
+        """The (standard) output as unicode string."""
+        return self.stdout
+
+    @property
+    def stdout(self):
+        """The standard output as unicode string."""
+        return self.stdout_bytes.decode(self.runner.charset, 'replace') \
+            .replace('\r\n', '\n')
+
+    @property
+    def stderr(self):
+        """The standard error as unicode string."""
+        if not self.stderr_bytes:
+            raise ValueError("stderr not separately captured")
+        return self.stderr_bytes.decode(self.runner.charset, 'replace') \
             .replace('\r\n', '\n')
 
+
     def __repr__(self):
-        return '<Result %s>' % (
+        return '<%s %s>' % (
+            type(self).__name__,
             self.exception and repr(self.exception) or 'okay',
         )
 
@@ -111,14 +129,21 @@ class CliRunner(object):
                        to stdout.  This is useful for showing examples in
                        some circumstances.  Note that regular prompts
                        will automatically echo the input.
+    :param mix_stderr: if this is set to `False`, then stdout and stderr are
+                       preserved as independent streams.  This is useful for
+                       Unix-philosophy apps that have predictable stdout and
+                       noisy stderr, such that each may be measured
+                       independently
     """
 
-    def __init__(self, charset=None, env=None, echo_stdin=False):
+    def __init__(self, charset=None, env=None, echo_stdin=False,
+                 mix_stderr=True):
         if charset is None:
             charset = 'utf-8'
         self.charset = charset
         self.env = env or {}
         self.echo_stdin = echo_stdin
+        self.mix_stderr = mix_stderr
 
     def get_default_prog_name(self, cli):
         """Given a command object it will return the default program name
@@ -163,16 +188,27 @@ def isolation(self, input=None, env=None, color=False):
         env = self.make_env(env)
 
         if PY2:
-            sys.stdout = sys.stderr = bytes_output = StringIO()
+            bytes_output = StringIO()
             if self.echo_stdin:
                 input = EchoingStdin(input, bytes_output)
+            sys.stdout = bytes_output
+            if not self.mix_stderr:
+                bytes_error = StringIO()
+                sys.stderr = bytes_error
         else:
             bytes_output = io.BytesIO()
             if self.echo_stdin:
                 input = EchoingStdin(input, bytes_output)
             input = io.TextIOWrapper(input, encoding=self.charset)
-            sys.stdout = sys.stderr = io.TextIOWrapper(
+            sys.stdout = io.TextIOWrapper(
                 bytes_output, encoding=self.charset)
+            if not self.mix_stderr:
+                bytes_error = io.BytesIO()
+                sys.stderr = io.TextIOWrapper(
+                    bytes_error, encoding=self.charset)
+
+        if self.mix_stderr:
+            sys.stderr = sys.stdout
 
         sys.stdin = input
 
@@ -196,6 +232,7 @@ def _getchar(echo):
             return char
 
         default_color = color
+
         def should_strip_ansi(stream=None, color=None):
             if color is None:
                 return not default_color
@@ -221,7 +258,7 @@ def should_strip_ansi(stream=None, color=None):
                 pass
             else:
                 os.environ[key] = value
-            yield bytes_output
+            yield (bytes_output, not self.mix_stderr and bytes_error)
         finally:
             for key, value in iteritems(old_env):
                 if value is None:
@@ -241,7 +278,7 @@ def should_strip_ansi(stream=None, color=None):
         clickpkg.formatting.FORCED_WIDTH = old_forced_width
 
     def invoke(self, cli, args=None, input=None, env=None,
-               catch_exceptions=True, color=False, **extra):
+               catch_exceptions=True, color=False, mix_stderr=False, **extra):
         """Invokes a command in an isolated environment. 
The arguments are
        forwarded directly to the command line script, the `extra` keyword
        arguments are passed to the :meth:`~clickpkg.Command.main` function of
@@ -260,7 +297,10 @@ def invoke(self, cli, args=None, input=None, env=None,
            The ``color`` parameter was added.
 
         :param cli: the command to invoke
-        :param args: the arguments to invoke
+        :param args: the arguments to invoke. It may be given as an iterable
+                     or a string. When given as string it will be interpreted
+                     as a Unix shell command. More details at
+                     :func:`shlex.split`.
         :param input: the input data for `sys.stdin`.
         :param env: the environment overrides.
         :param catch_exceptions: Whether to catch any other exceptions than
@@ -270,36 +310,48 @@ def invoke(self, cli, args=None, input=None, env=None,
                         application can still override this explicitly.
         """
         exc_info = None
-        with self.isolation(input=input, env=env, color=color) as out:
+        with self.isolation(input=input, env=env, color=color) as outstreams:
             exception = None
             exit_code = 0
 
+            if isinstance(args, string_types):
+                args = shlex.split(args)
+
             try:
-                cli.main(args=args or (),
-                         prog_name=self.get_default_prog_name(cli), **extra)
-            except SystemExit as e:
-                if e.code != 0:
-                    exception = e
+                prog_name = extra.pop("prog_name")
+            except KeyError:
+                prog_name = self.get_default_prog_name(cli)
 
+            try:
+                cli.main(args=args or (), prog_name=prog_name, **extra)
+            except SystemExit as e:
                 exc_info = sys.exc_info()
-                exit_code = e.code
+                exit_code = e.code
+                if exit_code is None:
+                    exit_code = 0
+
+                if exit_code != 0:
+                    exception = e
+
                 if not isinstance(exit_code, int):
                     sys.stdout.write(str(exit_code))
                     sys.stdout.write('\n')
                     exit_code = 1
+
             except Exception as e:
                 if not catch_exceptions:
                     raise
                 exception = e
-                exit_code = -1
+                exit_code = 1
                 exc_info = sys.exc_info()
             finally:
                 sys.stdout.flush()
-                output = out.getvalue()
+                stdout = outstreams[0].getvalue()
+                stderr = outstreams[1] and outstreams[1].getvalue()
 
         return Result(runner=self,
-                      output_bytes=output,
+                      stdout_bytes=stdout,
+                      stderr_bytes=stderr,
                       exit_code=exit_code,
                       exception=exception,
                       exc_info=exc_info)
diff --git a/pipenv/vendor/click/types.py b/pipenv/vendor/click/types.py
index 36390026dc..1f88032f54 100644
--- a/pipenv/vendor/click/types.py
+++ b/pipenv/vendor/click/types.py
@@ -1,5 +1,6 @@
 import os
 import stat
+from datetime import datetime
 
 from ._compat import open_stream, text_type, filename_to_ui, \
     get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2
@@ -126,34 +127,54 @@ def __repr__(self):
 
 
 class Choice(ParamType):
-    """The choice type allows a value to be checked against a fixed set of
-    supported values.  All of these values have to be strings.
+    """The choice type allows a value to be checked against a fixed set
+    of supported values.  All of these values have to be strings.
+
+    You should only pass a list or tuple of choices.  Other iterables
+    (like generators) may lead to surprising results.
 
     See :ref:`choice-opts` for an example.
+
+    :param case_sensitive: Set to false to make choices case
+        insensitive.  Defaults to true.
     """
+
     name = 'choice'
 
-    def __init__(self, choices):
+    def __init__(self, choices, case_sensitive=True):
         self.choices = choices
+        self.case_sensitive = case_sensitive
 
     def get_metavar(self, param):
         return '[%s]' % '|'.join(self.choices)
 
     def get_missing_message(self, param):
-        return 'Choose from %s.' % ', '.join(self.choices)
+        return 'Choose from:\n\t%s.' 
% ',\n\t'.join(self.choices) def convert(self, value, param, ctx): # Exact match if value in self.choices: return value - # Match through normalization + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = self.choices + if ctx is not None and \ ctx.token_normalize_func is not None: - value = ctx.token_normalize_func(value) - for choice in self.choices: - if ctx.token_normalize_func(choice) == value: - return choice + normed_value = ctx.token_normalize_func(value) + normed_choices = [ctx.token_normalize_func(choice) for choice in + self.choices] + + if not self.case_sensitive: + normed_value = normed_value.lower() + normed_choices = [choice.lower() for choice in normed_choices] + + if normed_value in normed_choices: + return normed_value self.fail('invalid choice: %s. (choose from %s)' % (value, ', '.join(self.choices)), param, ctx) @@ -162,6 +183,59 @@ def __repr__(self): return 'Choice(%r)' % list(self.choices) +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + name = 'datetime' + + def __init__(self, formats=None): + self.formats = formats or [ + '%Y-%m-%d', + '%Y-%m-%dT%H:%M:%S', + '%Y-%m-%d %H:%M:%S' + ] + + def get_metavar(self, param): + return '[{}]'.format('|'.join(self.formats)) + + def _try_to_convert_date(self, value, format): + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert(self, value, param, ctx): + # Exact match + for format in self.formats: + dtime = self._try_to_convert_date(value, format) + if dtime: + return dtime + + self.fail( + 'invalid datetime format: {}. (choose from {})'.format( + value, ', '.join(self.formats))) + + def __repr__(self): + return 'DateTime' + + class IntParamType(ParamType): name = 'integer' @@ -214,6 +288,59 @@ def __repr__(self): return 'IntRange(%r, %r)' % (self.min, self.max) +class FloatParamType(ParamType): + name = 'float' + + def convert(self, value, param, ctx): + try: + return float(value) + except (UnicodeError, ValueError): + self.fail('%s is not a valid floating point value' % + value, param, ctx) + + def __repr__(self): + return 'FLOAT' + + +class FloatRange(FloatParamType): + """A parameter that works similar to :data:`click.FLOAT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. 
+ """ + name = 'float range' + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = FloatParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if self.min is not None and rv < self.min or \ + self.max is not None and rv > self.max: + if self.min is None: + self.fail('%s is bigger than the maximum valid value ' + '%s.' % (rv, self.max), param, ctx) + elif self.max is None: + self.fail('%s is smaller than the minimum valid value ' + '%s.' % (rv, self.min), param, ctx) + else: + self.fail('%s is not in the valid range of %s to %s.' + % (rv, self.min, self.max), param, ctx) + return rv + + def __repr__(self): + return 'FloatRange(%r, %r)' % (self.min, self.max) + + class BoolParamType(ParamType): name = 'boolean' @@ -221,9 +348,9 @@ def convert(self, value, param, ctx): if isinstance(value, bool): return bool(value) value = value.lower() - if value in ('true', '1', 'yes', 'y'): + if value in ('true', 't', '1', 'yes', 'y'): return True - elif value in ('false', '0', 'no', 'n'): + elif value in ('false', 'f', '0', 'no', 'n'): return False self.fail('%s is not a valid boolean' % value, param, ctx) @@ -231,20 +358,6 @@ def __repr__(self): return 'BOOL' -class FloatParamType(ParamType): - name = 'float' - - def convert(self, value, param, ctx): - try: - return float(value) - except (UnicodeError, ValueError): - self.fail('%s is not a valid floating point value' % - value, param, ctx) - - def __repr__(self): - return 'FLOAT' - - class UUIDParameterType(ParamType): name = 'uuid' @@ -273,9 +386,12 @@ class File(ParamType): opened in binary mode or for writing. The encoding parameter can be used to force a specific encoding. - The `lazy` flag controls if the file should be opened immediately or - upon first IO. The default is to be non lazy for standard input and - output streams as well as files opened for reading, lazy otherwise. + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. Starting with Click 2.0, files can also be opened atomically in which case all writes go into a separate file in the same folder and upon @@ -358,14 +474,16 @@ class Path(ParamType): :param readable: if true, a readable check is performed. :param resolve_path: if this is true, then the path is fully resolved before the value is passed onwards. This means - that it's absolute and symlinks are resolved. + that it's absolute and symlinks are resolved. It + will not expand a tilde-prefix, as this is + supposed to be done by the shell only. :param allow_dash: If this is set to `True`, a single dash to indicate standard streams is permitted. - :param type: optionally a string type that should be used to - represent the path. The default is `None` which - means the return value will be either bytes or - unicode depending on what makes most sense given the - input data Click deals with. + :param path_type: optionally a string type that should be used to + represent the path. 
The default is `None` which
+                      means the return value will be either bytes or
+                      unicode depending on what makes most sense given the
+                      input data Click deals with.
     """
     envvar_list_splitter = os.path.pathsep
@@ -384,7 +502,7 @@ def __init__(self, exists=False, file_okay=True, dir_okay=True,
         if self.file_okay and not self.dir_okay:
             self.name = 'file'
             self.path_type = 'File'
-        if self.dir_okay and not self.file_okay:
+        elif self.dir_okay and not self.file_okay:
             self.name = 'directory'
             self.path_type = 'Directory'
         else:
diff --git a/pipenv/vendor/click/utils.py b/pipenv/vendor/click/utils.py
index eee626d3fd..fc84369fc9 100644
--- a/pipenv/vendor/click/utils.py
+++ b/pipenv/vendor/click/utils.py
@@ -43,6 +43,7 @@ def make_str(value):
 
 def make_default_short_help(help, max_length=45):
+    """Return a condensed version of help string."""
     words = help.split()
     total_length = 0
     result = []
@@ -171,7 +172,7 @@ def echo(message=None, file=None, nl=True, err=False, color=None):
 
     Primarily it means that you can print binary data as well as Unicode
     data on both 2.x and 3.x to the given file in the most appropriate way
-    possible. This is a very carefree function as in that it will try its
+    possible. This is a very carefree function in that it will try its
     best to not fail. As of Click 6.0 this includes support for unicode
     output on the Windows console.
 
@@ -183,7 +184,7 @@ def echo(message=None, file=None, nl=True, err=False, color=None):
     -   hide ANSI codes automatically if the destination file is not a
         terminal.
 
-    .. _colorama: http://pypi.python.org/pypi/colorama
+    .. _colorama: https://pypi.org/project/colorama/
 
     .. versionchanged:: 6.0
        As of Click 6.0 the echo function will properly support unicode
@@ -413,3 +414,27 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
     return os.path.join(
         os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')),
         _posixify(app_name))
+
+
+class PacifyFlushWrapper(object):
+    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
+    of the Python interpreter. Notably ``.flush()`` is always called on
+    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+    other cleanup code, and the case where the underlying file is not a broken
+    pipe, all calls and attributes are proxied.
+    """
+
+    def __init__(self, wrapped):
+        self.wrapped = wrapped
+
+    def flush(self):
+        try:
+            self.wrapped.flush()
+        except IOError as e:
+            import errno
+            if e.errno != errno.EPIPE:
+                raise
+
+    def __getattr__(self, attr):
+        return getattr(self.wrapped, attr)
diff --git a/pipenv/vendor/delegator.py b/pipenv/vendor/delegator.py
index 5820db7b83..d15aeb9783 100644
--- a/pipenv/vendor/delegator.py
+++ b/pipenv/vendor/delegator.py
@@ -4,6 +4,7 @@
 import signal
 import sys
 import locale
+import errno
 
 from pexpect.popen_spawn import PopenSpawn
 
@@ -11,12 +12,40 @@
 try:
     STR_TYPES = (str, unicode)
 except NameError:
-    STR_TYPES = (str, )
+    STR_TYPES = (str,)
 
 TIMEOUT = 30
 
 
-class Command(object):
+def pid_exists(pid):
+    """Check whether pid exists in the current process table."""
+    if pid == 0:
+        # According to "man 2 kill" PID 0 has a special meaning:
+        # it refers to <<every process in the process group of the
+        # calling process>> so we don't want to go any further.
+        # If we get here it means this UNIX platform *does* have
+        # a process with id 0.
+ return True + try: + os.kill(pid, 0) + except OSError as err: + if err.errno == errno.ESRCH: + # ESRCH == No such process + return False + elif err.errno == errno.EPERM: + # EPERM clearly means there's a process to deny access to + return True + else: + # According to "man 2 kill" possible error values are + # (EINVAL, EPERM, ESRCH) therefore we should never get + # here. If we do let's be explicit in considering this + # an error. + raise err + else: + return True + + +class Command(object): def __init__(self, cmd, timeout=TIMEOUT): super(Command, self).__init__() self.cmd = cmd @@ -28,7 +57,7 @@ def __init__(self, cmd, timeout=TIMEOUT): self.__err = None def __repr__(self): - return ''.format(self.cmd) + return "".format(self.cmd) @property def _popen_args(self): @@ -37,27 +66,23 @@ def _popen_args(self): @property def _default_popen_kwargs(self): return { - 'env': os.environ.copy(), - 'stdin': subprocess.PIPE, - 'stdout': subprocess.PIPE, - 'stderr': subprocess.PIPE, - 'shell': True, - 'universal_newlines': True, - 'bufsize': 0 + "env": os.environ.copy(), + "stdin": subprocess.PIPE, + "stdout": subprocess.PIPE, + "stderr": subprocess.PIPE, + "shell": True, + "universal_newlines": True, + "bufsize": 0, } @property def _default_pexpect_kwargs(self): - encoding = 'utf-8' - if sys.platform == 'win32': + encoding = "utf-8" + if sys.platform == "win32": default_encoding = locale.getdefaultlocale()[1] if default_encoding is not None: encoding = default_encoding - return { - 'env': os.environ.copy(), - 'encoding': encoding, - 'timeout': self.timeout - } + return {"env": os.environ.copy(), "encoding": encoding, "timeout": self.timeout} @property def _uses_subprocess(self): @@ -71,12 +96,16 @@ def _uses_pexpect(self): def std_out(self): return self.subprocess.stdout + @property + def ok(self): + return self.return_code == 0 + @property def _pexpect_out(self): if self.subprocess.encoding: - result = '' + result = "" else: - result = b'' + result = b"" if self.subprocess.before: result += self.subprocess.before @@ -120,11 +149,16 @@ def err(self): def pid(self): """The process' PID.""" # Support for pexpect's functionality. - if hasattr(self.subprocess, 'proc'): + if hasattr(self.subprocess, "proc"): return self.subprocess.proc.pid # Standard subprocess method. return self.subprocess.pid + @property + def is_alive(self): + """Is the process alive?""" + return pid_exists(self.pid) + @property def return_code(self): # Support for pexpect's functionality. @@ -144,23 +178,23 @@ def run(self, block=True, binary=False, cwd=None, env=None): # Use subprocess. if self.blocking: popen_kwargs = self._default_popen_kwargs.copy() - popen_kwargs['universal_newlines'] = not binary + popen_kwargs["universal_newlines"] = not binary if cwd: - popen_kwargs['cwd'] = cwd + popen_kwargs["cwd"] = cwd if env: - popen_kwargs['env'].update(env) + popen_kwargs["env"].update(env) s = subprocess.Popen(self._popen_args, **popen_kwargs) # Otherwise, use pexpect. else: pexpect_kwargs = self._default_pexpect_kwargs.copy() if binary: - pexpect_kwargs['encoding'] = None + pexpect_kwargs["encoding"] = None if cwd: - pexpect_kwargs['cwd'] = cwd + pexpect_kwargs["cwd"] = cwd if env: - pexpect_kwargs['env'].update(env) + pexpect_kwargs["env"].update(env) # Enable Python subprocesses to work with expect functionality. 
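Stepping outside the hunk for a moment: the new `ok` and `is_alive` properties
added to `Command` above give delegator a quick health-check API. A small
usage sketch, assuming a POSIX shell and that the vendored module is
importable as `delegator`:

    import delegator

    c = delegator.run("echo hello")  # blocking run
    print(c.out)                     # "hello\n"
    print(c.ok)                      # True, i.e. return_code == 0
    print(c.is_alive)                # False once the process has exited

`ok` simply compares `return_code == 0`, while `is_alive` feeds `pid` through
the new `pid_exists()` helper shown earlier in this file.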
- pexpect_kwargs['env']['PYTHONUNBUFFERED'] = '1' + pexpect_kwargs["env"]["PYTHONUNBUFFERED"] = "1" s = PopenSpawn(self._popen_args, **pexpect_kwargs) self.subprocess = s self.was_run = True @@ -169,7 +203,7 @@ def expect(self, pattern, timeout=-1): """Waits on the given pattern to appear in std_out""" if self.blocking: - raise RuntimeError('expect can only be used on non-blocking commands.') + raise RuntimeError("expect can only be used on non-blocking commands.") self.subprocess.expect(pattern=pattern, timeout=timeout) @@ -177,7 +211,7 @@ def send(self, s, end=os.linesep, signal=False): """Sends the given string or signal to std_in.""" if self.blocking: - raise RuntimeError('send can only be used on non-blocking commands.') + raise RuntimeError("send can only be used on non-blocking commands.") if not signal: if self._uses_subprocess: @@ -191,7 +225,10 @@ def terminate(self): self.subprocess.terminate() def kill(self): - self.subprocess.kill(signal.SIGINT) + if self._uses_pexpect: + self.subprocess.kill(signal.SIGINT) + else: + self.subprocess.send_signal(signal.SIGINT) def block(self): """Blocks until process is complete.""" @@ -237,12 +274,12 @@ def _expand_args(command): # Prepare arguments. if isinstance(command, STR_TYPES): if sys.version_info[0] == 2: - splitter = shlex.shlex(command.encode('utf-8')) + splitter = shlex.shlex(command.encode("utf-8")) elif sys.version_info[0] == 3: splitter = shlex.shlex(command) else: - splitter = shlex.shlex(command.encode('utf-8')) - splitter.whitespace = '|' + splitter = shlex.shlex(command.encode("utf-8")) + splitter.whitespace = "|" splitter.whitespace_split = True command = [] @@ -283,4 +320,3 @@ def run(command, block=True, binary=False, timeout=TIMEOUT, cwd=None, env=None): c.block() return c - diff --git a/pipenv/vendor/distlib/__init__.py b/pipenv/vendor/distlib/__init__.py index d4aab453ae..a786b4d3b7 100644 --- a/pipenv/vendor/distlib/__init__.py +++ b/pipenv/vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.2.7' +__version__ = '0.2.8' class DistlibException(Exception): pass diff --git a/pipenv/vendor/distlib/database.py b/pipenv/vendor/distlib/database.py index a19905e215..b13cdac92b 100644 --- a/pipenv/vendor/distlib/database.py +++ b/pipenv/vendor/distlib/database.py @@ -20,7 +20,8 @@ from . 
import DistlibException, resources from .compat import StringIO from .version import get_scheme, UnsupportedVersionError -from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, CSVWriter) @@ -132,7 +133,9 @@ def _yield_distributions(self): if not r or r.path in seen: continue if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] for metadata_filename in possible_filenames: metadata_path = posixpath.join(entry, metadata_filename) pydist = finder.find(metadata_path) diff --git a/pipenv/vendor/distlib/locators.py b/pipenv/vendor/distlib/locators.py index 11d26361c9..5c655c3e51 100644 --- a/pipenv/vendor/distlib/locators.py +++ b/pipenv/vendor/distlib/locators.py @@ -255,7 +255,9 @@ def same_project(name1, name2): if path.endswith('.whl'): try: wheel = Wheel(path) - if is_compatible(wheel, self.wheel_tags): + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: if project_name is None: include = True else: @@ -613,6 +615,7 @@ def __init__(self, url, timeout=None, num_workers=10, **kwargs): # as it is for coordinating our internal threads - the ones created # in _prepare_threads. self._gplock = threading.RLock() + self.platform_check = False # See issue #112 def _prepare_threads(self): """ @@ -658,8 +661,8 @@ def _get_project(self, name): del self.result return result - platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' - r'win(32|-amd64)|macosx-?\d+)\b', re.I) + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) def _is_platform_dependent(self, url): """ @@ -677,7 +680,7 @@ def _process_download(self, url): Note that the return value isn't actually used other than as a boolean value. """ - if self._is_platform_dependent(url): + if self.platform_check and self._is_platform_dependent(url): info = None else: info = self.convert_url_to_download_info(url, self.project_name) diff --git a/pipenv/vendor/distlib/metadata.py b/pipenv/vendor/distlib/metadata.py index 6d6470fff8..77eed7f968 100644 --- a/pipenv/vendor/distlib/metadata.py +++ b/pipenv/vendor/distlib/metadata.py @@ -91,7 +91,9 @@ class MetadataInvalidError(DistlibException): _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') -_566_FIELDS = _426_FIELDS + ('Description-Content-Type',) +# See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. 
Include +# it in the tuple literal below to allow it (for now) +_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires') _566_MARKERS = ('Description-Content-Type',) @@ -377,8 +379,8 @@ def read_file(self, fileob): value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) - logger.debug('Attempting to set metadata for %s', self) - self.set_metadata_version() + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() def write(self, filepath, skip_unknown=False): """Write the metadata fields to filepath.""" @@ -648,6 +650,7 @@ def __repr__(self): METADATA_FILENAME = 'pydist.json' WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' class Metadata(object): diff --git a/pipenv/vendor/distlib/scripts.py b/pipenv/vendor/distlib/scripts.py index 0b7c3d0b36..8e22cb9163 100644 --- a/pipenv/vendor/distlib/scripts.py +++ b/pipenv/vendor/distlib/scripts.py @@ -236,8 +236,10 @@ def get_manifest(self, exename): def _write_script(self, names, shebang, script_bytes, filenames, ext): use_launcher = self.add_launchers and self._is_nt linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep if not use_launcher: - script_bytes = shebang + linesep + script_bytes + script_bytes = shebang + script_bytes else: # pragma: no cover if ext == 'py': launcher = self._get_launcher('t') @@ -247,7 +249,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): with ZipFile(stream, 'w') as zf: zf.writestr('__main__.py', script_bytes) zip_data = stream.getvalue() - script_bytes = launcher + shebang + linesep + zip_data + script_bytes = launcher + shebang + zip_data for name in names: outname = os.path.join(self.target_dir, name) if use_launcher: # pragma: no cover diff --git a/pipenv/vendor/distlib/util.py b/pipenv/vendor/distlib/util.py index 0b14a93b31..9d4bfd3bec 100644 --- a/pipenv/vendor/distlib/util.py +++ b/pipenv/vendor/distlib/util.py @@ -545,16 +545,14 @@ def copy_stream(self, instream, outfile, encoding=None): def write_binary_file(self, path, data): self.ensure_dir(os.path.dirname(path)) if not self.dry_run: + if os.path.exists(path): + os.remove(path) with open(path, 'wb') as f: f.write(data) self.record_as_written(path) def write_text_file(self, path, data, encoding): - self.ensure_dir(os.path.dirname(path)) - if not self.dry_run: - with open(path, 'wb') as f: - f.write(data.encode(encoding)) - self.record_as_written(path) + self.write_binary_file(path, data.encode(encoding)) def set_mode(self, bits, mask, files): if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): @@ -582,7 +580,7 @@ def ensure_dir(self, path): if self.record: self.dirs_created.add(path) - def byte_compile(self, path, optimize=False, force=False, prefix=None): + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): dpath = cache_from_source(path, not optimize) logger.info('Byte-compiling %s to %s', path, dpath) if not self.dry_run: @@ -592,7 +590,10 @@ def byte_compile(self, path, optimize=False, force=False, prefix=None): else: assert path.startswith(prefix) diagpath = path[len(prefix):] - py_compile.compile(path, dpath, diagpath, True) # raise error + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error 
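The `hashed_invalidation` plumbing above ultimately reduces to a single
stdlib call; a minimal stand-alone sketch of the PEP 552 checked-hash mode it
enables (the module path here is hypothetical):

    import py_compile

    source = "example.py"
    if hasattr(py_compile, "PycInvalidationMode"):  # Python 3.7+
        py_compile.compile(
            source, doraise=True,
            invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
        )
    else:
        py_compile.compile(source, doraise=True)  # timestamp-based .pyc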
self.record_as_written(dpath) return dpath diff --git a/pipenv/vendor/distlib/wheel.py b/pipenv/vendor/distlib/wheel.py index 77372235c0..b04bfaefe9 100644 --- a/pipenv/vendor/distlib/wheel.py +++ b/pipenv/vendor/distlib/wheel.py @@ -442,7 +442,9 @@ def install(self, paths, maker, **kwargs): This can be used to issue any warnings to raise any exceptions. If kwarg ``lib_only`` is True, only the purelib/platlib files are installed, and the headers, scripts, data and dist-info metadata are - not written. + not written. If kwarg ``bytecode_hashed_invalidation`` is True, written + bytecode will try to use file-hash based invalidation (PEP-552) on + supported interpreter versions (CPython 2.7+). The return value is a :class:`InstalledDistribution` instance unless ``options.lib_only`` is True, in which case the return value is ``None``. @@ -451,6 +453,7 @@ def install(self, paths, maker, **kwargs): dry_run = maker.dry_run warner = kwargs.get('warner') lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -557,7 +560,8 @@ def install(self, paths, maker, **kwargs): '%s' % outfile) if bc and outfile.endswith('.py'): try: - pyc = fileop.byte_compile(outfile) + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) outfiles.append(pyc) except Exception: # Don't give up if byte-compilation fails, diff --git a/pipenv/vendor/modutil.py b/pipenv/vendor/modutil.py deleted file mode 100644 index d68f4851f5..0000000000 --- a/pipenv/vendor/modutil.py +++ /dev/null @@ -1,145 +0,0 @@ -"""Help for working with modules.""" -__version__ = "2.0.0" - -import importlib -import importlib.machinery -import importlib.util -import types - - -STANDARD_MODULE_ATTRS = frozenset(['__all__', '__builtins__', '__cached__', - '__doc__', '__file__', '__loader__', - '__name__', '__package__', '__spec__', - '__getattr__']) - - -class ModuleAttributeError(AttributeError): - """An AttributeError specifically for modules. - - The module_name and 'attribute' attributes are set to strings representing - the module the attribute was searched on and the missing attribute, - respectively. - - """ - - def __init__(self, module_name, attribute): - self.module_name = module_name - self.attribute = attribute - super().__init__(f"module {module_name!r} has no attribute {attribute!r}") - - - -def lazy_import(module_name, to_import): - """Return the importing module and a callable for lazy importing. - - The module named by module_name represents the module performing the - import to help facilitate resolving relative imports. - - to_import is an iterable of the modules to be potentially imported (absolute - or relative). The `as` form of importing is also supported, - e.g. `pkg.mod as spam`. - - This function returns a tuple of two items. The first is the importer - module for easy reference within itself. The second item is a callable to be - set to `__getattr__`. 
- """ - module = importlib.import_module(module_name) - import_mapping = {} - for name in to_import: - importing, _, binding = name.partition(' as ') - if not binding: - _, _, binding = importing.rpartition('.') - import_mapping[binding] = importing - - def __getattr__(name): - if name not in import_mapping: - raise ModuleAttributeError(module_name, name) - importing = import_mapping[name] - # imortlib.import_module() implicitly sets submodules on this module as - # appropriate for direct imports. - imported = importlib.import_module(importing, - module.__spec__.parent) - setattr(module, name, imported) - return imported - - return module, __getattr__ - - -def filtered_attrs(module, *, modules=False, private=False, dunder=False, - common=False): - """Return a collection of attributes on 'module'. - - If 'modules' is false then module instances are excluded. If 'private' is - false then attributes starting with, but not ending in, '_' will be - excluded. With 'dunder' set to false then attributes starting and ending - with '_' are left out. The 'common' argument controls whether attributes - found in STANDARD_MODULE_ATTRS are returned. - - """ - attr_names = set() - for name, value in module.__dict__.items(): - if not common and name in STANDARD_MODULE_ATTRS: - continue - if name.startswith('_'): - if name.endswith('_'): - if not dunder: - continue - elif not private: - continue - if not modules and isinstance(value, types.ModuleType): - continue - attr_names.add(name) - return frozenset(attr_names) - - -def calc___all__(module_name, **kwargs): - """Return a sorted list of defined attributes on 'module_name'. - - All values specified in **kwargs are directly passed to filtered_attrs(). - - """ - module = importlib.import_module(module_name) - return sorted(filtered_attrs(module, **kwargs)) - - -def filtered_dir(module_name, *, additions={}, **kwargs): - """Return a callable appropriate for __dir__(). - - All values specified in **kwargs get passed directly to filtered_attrs(). - The 'additions' argument should be an iterable which is added to the final - results. - - """ - module = importlib.import_module(module_name) - - def __dir__(): - attr_names = set(filtered_attrs(module, **kwargs)) - attr_names.update(additions) - return sorted(attr_names) - - return __dir__ - - -def chained___getattr__(module_name, *getattrs): - """Create a callable which calls each __getattr__ in sequence. - - Any raised ModuleAttributeError which matches module_name and the - attribute being searched for will be caught and the search will continue. - All other exceptions will be allowed to propagate. If no callable - successfully returns a value, ModuleAttributeError will be raised. 
- - """ - def __getattr__(name): - """Call each __getattr__ function in sequence.""" - for getattr_ in getattrs: - try: - return getattr_(name) - except ModuleAttributeError as exc: - if exc.module_name == module_name and exc.attribute == name: - continue - else: - raise - else: - raise ModuleAttributeError(module_name, name) - - return __getattr__ diff --git a/pipenv/vendor/packaging/__about__.py b/pipenv/vendor/packaging/__about__.py index 4255c5b553..21fc6ce3e7 100644 --- a/pipenv/vendor/packaging/__about__.py +++ b/pipenv/vendor/packaging/__about__.py @@ -12,10 +12,10 @@ __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "17.1" +__version__ = "18.0" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" __license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2014-2016 %s" % __author__ +__copyright__ = "Copyright 2014-2018 %s" % __author__ diff --git a/pipenv/vendor/packaging/requirements.py b/pipenv/vendor/packaging/requirements.py index f87c57cc80..e8008a6ddd 100644 --- a/pipenv/vendor/packaging/requirements.py +++ b/pipenv/vendor/packaging/requirements.py @@ -92,16 +92,16 @@ def __init__(self, requirement_string): try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: - raise InvalidRequirement( - "Invalid requirement, parse error at \"{0!r}\"".format( - requirement_string[e.loc:e.loc + 8])) + raise InvalidRequirement("Parse error at \"{0!r}\": {1}".format( + requirement_string[e.loc:e.loc + 8], e.msg + )) self.name = req.name if req.url: parsed_url = urlparse.urlparse(req.url) if not (parsed_url.scheme and parsed_url.netloc) or ( not parsed_url.scheme and not parsed_url.netloc): - raise InvalidRequirement("Invalid URL given") + raise InvalidRequirement("Invalid URL: {0}".format(req.url)) self.url = req.url else: self.url = None diff --git a/pipenv/vendor/packaging/specifiers.py b/pipenv/vendor/packaging/specifiers.py index 9b6353f052..4c798999d0 100644 --- a/pipenv/vendor/packaging/specifiers.py +++ b/pipenv/vendor/packaging/specifiers.py @@ -503,7 +503,7 @@ def _compare_greater_than(self, prospective, spec): return False # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is techincally greater than, to match. + # in the specifier, which is technically greater than, to match. 
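The guard that follows this comment can be verified in a few lines against
the packaging 18.0 release vendored here:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet(">1.7")
    assert Version("1.7+local.1") not in spec  # local build of 1.7 is excluded
    assert Version("1.7.1") in spec            # a truly greater release matches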
if prospective.local is not None: if Version(prospective.base_version) == Version(spec.base_version): return False diff --git a/pipenv/vendor/passa/LICENSE b/pipenv/vendor/passa/LICENSE index e1a278e7b3..cd41e27231 100644 --- a/pipenv/vendor/passa/LICENSE +++ b/pipenv/vendor/passa/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2018, Dan Ryan +Copyright (c) 2018, Dan Ryan and Tzu-ping Chung Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/pipenv/vendor/passa/actions/__init__.py b/pipenv/vendor/passa/actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pipenv/vendor/passa/actions/add.py b/pipenv/vendor/passa/actions/add.py new file mode 100644 index 0000000000..633846675c --- /dev/null +++ b/pipenv/vendor/passa/actions/add.py @@ -0,0 +1,57 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import itertools +import sys + + +def add_packages(packages=[], editables=[], project=None, dev=False, sync=False, clean=False): + from passa.models.lockers import PinReuseLocker + from passa.operations.lock import lock + + lines = list(itertools.chain( + packages, + ("-e {}".format(e) for e in editables), + )) + + project = project + for line in lines: + try: + project.add_line_to_pipfile(line, develop=dev) + except (TypeError, ValueError) as e: + print("Cannot add {line!r} to Pipfile: {error}".format( + line=line, error=str(e), + ), file=sys.stderr) + return 2 + + prev_lockfile = project.lockfile + + locker = PinReuseLocker(project) + success = lock(locker) + if not success: + return 1 + + project._p.write() + project._l.write() + print("Written to project at", project.root) + + if not sync: + return + + from passa.models.synchronizers import Synchronizer + from passa.operations.sync import sync + + lockfile_diff = project.difference_lockfile(prev_lockfile) + default = any(lockfile_diff.default) + develop = any(lockfile_diff.develop) + + syncer = Synchronizer( + project, default=default, develop=develop, + clean_unneeded=clean, + ) + success = sync(syncer) + if not success: + return 1 + + print("Synchronized project at", project.root) diff --git a/pipenv/vendor/passa/actions/clean.py b/pipenv/vendor/passa/actions/clean.py new file mode 100644 index 0000000000..3570e4dba2 --- /dev/null +++ b/pipenv/vendor/passa/actions/clean.py @@ -0,0 +1,16 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + + +def clean(project, dev=False): + from passa.models.synchronizers import Cleaner + from passa.operations.sync import clean + + cleaner = Cleaner(project, default=True, develop=dev) + + success = clean(cleaner) + if not success: + return 1 + + print("Cleaned project at", project.root) diff --git a/pipenv/vendor/passa/actions/freeze.py b/pipenv/vendor/passa/actions/freeze.py new file mode 100644 index 0000000000..ca4dbb2aef --- /dev/null +++ b/pipenv/vendor/passa/actions/freeze.py @@ -0,0 +1,93 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import contextlib +import io +import itertools +import sys + +import vistir.misc + + +def _source_as_lines(source, extra): + url = source["url"] + if extra: + lines = ["--extra-index-url {}".format(url)] + else: + lines = ["--index-url {}".format(url)] + if not source.get("verify_ssl", True): + lines = ["--trusted-host {}".format(url)] + return lines + + +def _requirement_as_line(requirement, 
sources, include_hashes): + if requirement.index: + sources = sources + else: + sources = None + line = vistir.misc.to_text( + requirement.as_line(sources=sources, include_hashes=include_hashes) + ) + return line + + +@contextlib.contextmanager +def open_for_output(filename): + if filename is None: + yield sys.stdout + return + with io.open(filename, "w", encoding="utf-8", newline="\n") as f: + yield f + + +def freeze(project=None, default=True, dev=True, include_hashes=None, target=None): + from requirementslib import Requirement + + lockfile = project.lockfile + if not lockfile: + print("Pipfile.lock is required to export.", file=sys.stderr) + return 1 + + section_names = [] + if default: + section_names.append("default") + if dev: + section_names.append("develop") + requirements = [ + Requirement.from_pipfile(key, entry._data) + for key, entry in itertools.chain.from_iterable( + lockfile.get(name, {}).items() + for name in section_names + ) + ] + + if include_hashes is None: + include_hashes = all(r.is_named for r in requirements) + + sources = lockfile.meta.sources._data + + source_lines = list(vistir.misc.dedup(itertools.chain( + itertools.chain.from_iterable( + _source_as_lines(source, False) + for source in sources[:1] + ), + itertools.chain.from_iterable( + _source_as_lines(source, True) + for source in sources[1:] + ), + ))) + + requirement_lines = sorted(vistir.misc.dedup( + _requirement_as_line(requirement, sources, include_hashes) + for requirement in requirements + )) + + with open_for_output(target) as f: + for line in source_lines: + f.write(line) + f.write("\n") + f.write("\n") + for line in requirement_lines: + f.write(line) + f.write("\n") diff --git a/pipenv/vendor/passa/actions/init.py b/pipenv/vendor/passa/actions/init.py new file mode 100644 index 0000000000..1d9f592302 --- /dev/null +++ b/pipenv/vendor/passa/actions/init.py @@ -0,0 +1,59 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import io +import os +from pip_shims import Command as PipCommand, cmdoptions +import plette +import six +import vistir + + +class PipCmd(PipCommand): + name = "PipCmd" + + +def get_sources(urls, trusted_hosts): + trusted_hosts = [six.moves.urllib.parse.urlparse(url).netloc for url in trusted_hosts] + sources = [] + for url in urls: + parsed_url = six.moves.urllib.parse.urlparse(url) + netloc = parsed_url.netloc + if '@' in netloc: + _, _, netloc = netloc.rpartition('@') + name, _, _ = netloc.partition('.') # Just use the domain name as the source name + verify_ssl = True + if netloc in trusted_hosts: + verify_ssl = False + sources.append({"url": url, "name": name, "verify_ssl": verify_ssl}) + return sources + + +def init_project(root=None, python_version=None): + pipfile_path = os.path.join(root, "Pipfile") + if os.path.isfile(pipfile_path): + raise RuntimeError("{0!r} is already a Pipfile project".format(root)) + if not os.path.exists(root): + vistir.path.mkdir_p(root, mode=0o755) + pip_command = PipCmd() + cmdoptions.make_option_group(cmdoptions.index_group, pip_command.parser) + parsed, _ = pip_command.parser.parse_args([]) + index_urls = [parsed.index_url] + parsed.extra_index_urls + sources = get_sources(index_urls, parsed.trusted_hosts) + data = { + "sources": sources, + "packages": {}, + "dev-packages": {}, + } + if python_version: + data["requires"] = {"python_version": python_version} + return create_project(pipfile_path=pipfile_path, data=data) + + +def create_project(pipfile_path, data={}): + pipfile = 
plette.pipfiles.Pipfile(data=data) + with io.open(pipfile_path, "w") as fh: + pipfile.dump(fh) + print("Successfully created new pipfile at {0!r}".format(pipfile_path)) + return 0 diff --git a/pipenv/vendor/passa/actions/install.py b/pipenv/vendor/passa/actions/install.py new file mode 100644 index 0000000000..1728dae576 --- /dev/null +++ b/pipenv/vendor/passa/actions/install.py @@ -0,0 +1,32 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + + +def install(project=None, check=True, dev=False, clean=True): + from passa.models.lockers import BasicLocker + from passa.operations.lock import lock + + project = project + + if not check or not project.is_synced(): + locker = BasicLocker(project) + success = lock(locker) + if not success: + return 1 + project._l.write() + print("Written to project at", project.root) + + from passa.models.synchronizers import Synchronizer + from passa.operations.sync import sync + + syncer = Synchronizer( + project, default=True, develop=dev, + clean_unneeded=clean, + ) + + success = sync(syncer) + if not success: + return 1 + + print("Synchronized project at", project.root) diff --git a/pipenv/vendor/passa/actions/lock.py b/pipenv/vendor/passa/actions/lock.py new file mode 100644 index 0000000000..7c09469555 --- /dev/null +++ b/pipenv/vendor/passa/actions/lock.py @@ -0,0 +1,17 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + + +def lock(project=None): + from passa.models.lockers import BasicLocker + from passa.operations.lock import lock + + project = project + locker = BasicLocker(project) + success = lock(locker) + if not success: + return + + project._l.write() + print("Written to project at", project.root) diff --git a/pipenv/vendor/passa/actions/remove.py b/pipenv/vendor/passa/actions/remove.py new file mode 100644 index 0000000000..158f5e6951 --- /dev/null +++ b/pipenv/vendor/passa/actions/remove.py @@ -0,0 +1,38 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + + +def remove(project=None, only="default", packages=[], clean=True): + from passa.models.lockers import PinReuseLocker + from passa.operations.lock import lock + + default = (only != "dev") + develop = (only != "default") + + project = project + project.remove_keys_from_pipfile( + packages, default=default, develop=develop, + ) + + locker = PinReuseLocker(project) + success = lock(locker) + if not success: + return 1 + + project._p.write() + project._l.write() + print("Written to project at", project.root) + + if not clean: + return + + from passa.models.synchronizers import Cleaner + from passa.operations.sync import clean + + cleaner = Cleaner(project, default=True, develop=True) + success = clean(cleaner) + if not success: + return 1 + + print("Cleaned project at", project.root) diff --git a/pipenv/vendor/passa/actions/sync.py b/pipenv/vendor/passa/actions/sync.py new file mode 100644 index 0000000000..23e36eebab --- /dev/null +++ b/pipenv/vendor/passa/actions/sync.py @@ -0,0 +1,20 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + + +def sync(project=None, dev=False, clean=True): + from passa.models.synchronizers import Synchronizer + from passa.operations.sync import sync + + project = project + syncer = Synchronizer( + project, default=True, develop=dev, + clean_unneeded=clean, + ) + + success = sync(syncer) + if not success: + return 1 + + print("Synchronized project at", 
project.root) diff --git a/pipenv/vendor/passa/actions/upgrade.py b/pipenv/vendor/passa/actions/upgrade.py new file mode 100644 index 0000000000..fb3ad7f581 --- /dev/null +++ b/pipenv/vendor/passa/actions/upgrade.py @@ -0,0 +1,52 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import sys + + +def upgrade(project=None, strategy="only-if-needed", sync=True, packages=[]): + from passa.models.lockers import EagerUpgradeLocker, PinReuseLocker + from passa.operations.lock import lock + + for package in packages: + if not project.contains_key_in_pipfile(package): + print("{package!r} not found in Pipfile".format( + package=package, + ), file=sys.stderr) + return 2 + + project.remove_keys_from_lockfile(packages) + + prev_lockfile = project.lockfile + + if strategy == "eager": + locker = EagerUpgradeLocker(project, packages) + else: + locker = PinReuseLocker(project) + success = lock(locker) + if not success: + return 1 + + project._l.write() + print("Written to project at", project.root) + + if not sync: + return + + from passa.operations.sync import sync + from passa.models.synchronizers import Synchronizer + + lockfile_diff = project.difference_lockfile(prev_lockfile) + default = bool(any(lockfile_diff.default)) + develop = bool(any(lockfile_diff.develop)) + + syncer = Synchronizer( + project, default=default, develop=develop, + clean_unneeded=False, + ) + success = sync(syncer) + if not success: + return 1 + + print("Synchronized project at", project.root) diff --git a/pipenv/vendor/passa/cli/__init__.py b/pipenv/vendor/passa/cli/__init__.py index cb503e8127..b6891d3891 100644 --- a/pipenv/vendor/passa/cli/__init__.py +++ b/pipenv/vendor/passa/cli/__init__.py @@ -34,7 +34,7 @@ def main(argv=None): continue parser = subparsers.add_parser(klass.name, help=klass.description) command = klass(parser) - parser.set_defaults(func=command.main) + parser.set_defaults(func=command.run) options = root_parser.parse_args(argv) diff --git a/pipenv/vendor/passa/cli/_base.py b/pipenv/vendor/passa/cli/_base.py index 68e0e34d36..0ca48682ed 100644 --- a/pipenv/vendor/passa/cli/_base.py +++ b/pipenv/vendor/passa/cli/_base.py @@ -6,31 +6,7 @@ import os import sys -import tomlkit.exceptions - - -def build_project(root): - # This is imported lazily to reduce import overhead. Not evey command - # needs the project instance. - from passa.internals.projects import Project - root = os.path.abspath(root) - if not os.path.isfile(os.path.join(root, "Pipfile")): - raise argparse.ArgumentError( - "{0!r} is not a Pipfile project".format(root), - ) - try: - project = Project(root) - except tomlkit.exceptions.ParseError as e: - raise argparse.ArgumentError( - "failed to parse Pipfile: {0!r}".format(str(e)), - ) - return project - - -# Better error reporting. Recent argparse would emit something like -# "invalid project root value: 'xxxxxx'". 
The str() wrapper is needed to -# keep Python 2 happy :( -build_project.__name__ = str("project root") +from .options import project class BaseCommand(object): @@ -38,19 +14,30 @@ class BaseCommand(object): """ name = None description = None - parsed_main = None - - def __init__(self, parser): + default_arguments = [project] + arguments = [] + + def __init__(self, parser=None): + if not parser: + parser = argparse.ArgumentParser( + prog=os.path.basename(sys.argv[0]), + description="Base argument parser for passa" + ) self.parser = parser self.add_arguments() @classmethod - def run_current_module(cls): + def build_parser(cls): parser = argparse.ArgumentParser( prog="passa {}".format(cls.name), description=cls.description, ) - cls(parser)() + return cls(parser) + + @classmethod + def run_parser(cls): + parser = cls.build_parser() + parser() def __call__(self, argv=None): options = self.parser.parse_args(argv) @@ -58,16 +45,17 @@ def __call__(self, argv=None): if result is not None: sys.exit(result) + def add_default_arguments(self): + for arg in self.default_arguments: + arg.add_to_parser(self.parser) + def add_arguments(self): - self.parser.add_argument( - "--project", - metavar="project", - default=os.getcwd(), - type=build_project, - help="path to project root (directory containing Pipfile)", - ) + self.add_default_arguments() + for arg in self.arguments: + arg.add_to_parser(self.parser) def main(self, options): - # This __dict__ access is needed for Python 2 to prevent Python from - # wrapping parsed_main into an unbounded method. - return type(self).__dict__["parsed_main"](options) + return self.run(options) + + def run(self, options): + raise NotImplementedError diff --git a/pipenv/vendor/passa/cli/add.py b/pipenv/vendor/passa/cli/add.py index 26ce0ed835..d5596cdeba 100644 --- a/pipenv/vendor/passa/cli/add.py +++ b/pipenv/vendor/passa/cli/add.py @@ -2,98 +2,27 @@ from __future__ import absolute_import, print_function, unicode_literals -import itertools -import sys - +from ..actions.add import add_packages from ._base import BaseCommand - - -def main(options): - from passa.internals.lockers import PinReuseLocker - from passa.operations.lock import lock - - lines = list(itertools.chain( - options.requirement_lines, - ("-e {}".format(e) for e in options.editable_lines), - )) - - project = options.project - for line in lines: - try: - project.add_line_to_pipfile(line, develop=options.dev) - except (TypeError, ValueError) as e: - print("Cannot add {line!r} to Pipfile: {error}".format( - line=line, error=str(e), - ), file=sys.stderr) - return 2 - - prev_lockfile = project.lockfile - - locker = PinReuseLocker(project) - success = lock(locker) - if not success: - return 1 - - project._p.write() - project._l.write() - print("Written to project at", project.root) - - if not options.sync: - return - - from passa.internals.synchronizers import Synchronizer - from passa.operations.sync import sync - - lockfile_diff = project.difference_lockfile(prev_lockfile) - default = bool(any(lockfile_diff.default)) - develop = bool(any(lockfile_diff.develop)) - - syncer = Synchronizer( - project, default=default, develop=develop, - clean_unneeded=False, - ) - success = sync(syncer) - if not success: - return 1 - - print("Synchronized project at", project.root) +from .options import package_group class Command(BaseCommand): name = "add" description = "Add packages to project." 
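The `_base.py` rewrite above replaces the old `parsed_main` hook with declared
`Option` lists plus a `run()` method. A hypothetical subcommand (not part of
this patch) sketching the new pattern:

    from passa.cli._base import BaseCommand
    from passa.cli.options import dev


    class ExampleCommand(BaseCommand):

        name = "example"
        description = "Demonstrate the declarative argument style."
        arguments = [dev]

        def run(self, options):
            # options.project is injected by the default --project argument,
            # so running this needs a Pipfile in the working directory
            # (or an explicit --project path).
            print("dev flag:", options.dev)


    if __name__ == "__main__":
        ExampleCommand.run_parser()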
- parsed_main = main + arguments = [package_group] - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "requirement_lines", metavar="requirement", - nargs="*", - help="requirement to add (can be used multiple times)", - ) - self.parser.add_argument( - "-e", "--editable", - metavar="requirement", dest="editable_lines", - action="append", default=[], - help="editable requirement to add (can be used multiple times)", - ) - self.parser.add_argument( - "--dev", - action="store_true", - help="add packages to [dev-packages]", - ) - self.parser.add_argument( - "--no-sync", dest="sync", - action="store_false", default=True, - help="do not synchronize the environment", - ) - - def main(self, options): - if not options.editable_lines and not options.requirement_lines: + def run(self, options): + if not options.editables and not options.packages: self.parser.error("Must supply either a requirement or --editable") - return super(Command, self).main(options) + return add_packages( + packages=options.packages, + editables=options.editables, + project=options.project, + dev=options.dev + ) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/clean.py b/pipenv/vendor/passa/cli/clean.py index cd1b679b97..e23d5ee58c 100644 --- a/pipenv/vendor/passa/cli/clean.py +++ b/pipenv/vendor/passa/cli/clean.py @@ -2,37 +2,20 @@ from __future__ import absolute_import, print_function, unicode_literals +from ..actions.clean import clean from ._base import BaseCommand - - -def main(options): - from passa.internals.synchronizers import Cleaner - from passa.operations.sync import clean - - project = options.project - cleaner = Cleaner(project, default=True, develop=options.dev) - - success = clean(cleaner) - if not success: - return 1 - - print("Cleaned project at", project.root) +from .options import dev, no_default class Command(BaseCommand): name = "clean" description = "Uninstall unlisted packages from the environment." 
- parsed_main = main + arguments = [dev, no_default] - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "--no-dev", dest="dev", - action="store_false", default=True, - help="uninstall develop packages, only keep default ones", - ) + def run(self, options): + return clean(project=options.project, default=options.default, dev=options.dev) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/freeze.py b/pipenv/vendor/passa/cli/freeze.py index 6ca57170b1..053c72730e 100644 --- a/pipenv/vendor/passa/cli/freeze.py +++ b/pipenv/vendor/passa/cli/freeze.py @@ -2,137 +2,23 @@ from __future__ import absolute_import, print_function, unicode_literals -import contextlib -import io -import itertools -import sys - -import six -import vistir.misc - +from ..actions.freeze import freeze from ._base import BaseCommand - - -def _source_as_lines(source, extra): - url = source["url"] - if extra: - lines = ["--extra-index-url {}".format(url)] - else: - lines = ["--index-url {}".format(url)] - if not source.get("verify_ssl", True): - lines = ["--trusted-host {}".format(url)] - return lines - - -def _requirement_as_line(requirement, sources, include_hashes): - if requirement.index: - sources = sources - else: - sources = None - line = requirement.as_line(sources=sources, include_hashes=include_hashes) - if not isinstance(line, six.text_type): - line = line.decode("utf-8") - return line - - -@contextlib.contextmanager -def open_for_output(filename): - if filename is None: - yield sys.stdout - return - with io.open(filename, "w", encoding="utf-8", newline="\n") as f: - yield f - - -def main(options): - from requirementslib import Requirement - - lockfile = options.project.lockfile - if not lockfile: - print("Pipfile.lock is required to export.", file=sys.stderr) - return 1 - - section_names = [] - if options.default: - section_names.append("default") - if options.dev: - section_names.append("develop") - requirements = [ - Requirement.from_pipfile(key, entry._data) - for key, entry in itertools.chain.from_iterable( - lockfile.get(name, {}).items() - for name in section_names - ) - ] - - include_hashes = options.include_hashes - if include_hashes is None: - include_hashes = all(r.is_named for r in requirements) - - sources = lockfile.meta.sources._data - - source_lines = list(vistir.misc.dedup(itertools.chain( - itertools.chain.from_iterable( - _source_as_lines(source, False) - for source in sources[:1] - ), - itertools.chain.from_iterable( - _source_as_lines(source, True) - for source in sources[1:] - ), - ))) - - requirement_lines = sorted(vistir.misc.dedup( - _requirement_as_line(requirement, sources, include_hashes) - for requirement in requirements - )) - - with open_for_output(options.target) as f: - for line in source_lines: - f.write(line) - f.write("\n") - f.write("\n") - for line in requirement_lines: - f.write(line) - f.write("\n\n") +from .options import dev, include_hashes_group, no_default, target class Command(BaseCommand): name = "freeze" description = "Export project depenencies to requirements.txt." 
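With the logic relocated into `passa.actions.freeze`, the export can also be
driven programmatically; a sketch assuming a hypothetical project directory
that already contains a Pipfile.lock:

    from passa.actions.freeze import freeze
    from passa.cli.options import Project

    freeze(
        project=Project("./myproject"),
        default=True, dev=False,
        include_hashes=None,          # None means "guess from the requirements"
        target="requirements.txt",    # omit to write to stdout
    )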
- parsed_main = main + arguments = [dev, no_default, target, include_hashes_group] - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "--target", - default=None, - help="file to export into (default is to print to stdout)", - ) - self.parser.add_argument( - "--dev", - action="store_true", default=False, - help="include development packages in requirements.txt", - ) - self.parser.add_argument( - "--no-default", dest="default", - action="store_false", default=True, - help="do not include default packages in requirements.txt", - ) - include_hashes_group = self.parser.add_mutually_exclusive_group() - include_hashes_group.add_argument( - "--include-hashes", dest="include_hashes", - action="store_true", - help="output hashes in requirements.txt (default is to guess)", - ) - include_hashes_group.add_argument( - "--no-include-hashes", dest="include_hashes", - action="store_false", - help=("do not output hashes in requirements.txt " - "(default is to guess)"), + def run(self, options): + return freeze( + project=options.project, default=options.default, dev=options.dev, + include_hashes=options.include_hashes ) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/init.py b/pipenv/vendor/passa/cli/init.py new file mode 100644 index 0000000000..95ce8d84c5 --- /dev/null +++ b/pipenv/vendor/passa/cli/init.py @@ -0,0 +1,32 @@ +# -*- coding=utf-8 -*- + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import os + +from ..actions.init import init_project +from ._base import BaseCommand +from .options import new_project_group + + +class Command(BaseCommand): + + name = "init" + description = "Create a new project." + default_arguments = [] + arguments = [new_project_group] + + def run(self, options): + pipfile_path = os.path.join(options.project, "Pipfile") + if os.path.exists(pipfile_path): + raise argparse.ArgumentError( + "{0!r} is already a Pipfile project".format(options.project), + ) + return init_project( + root=options.project, python_version=options.python_version + ) + + +if __name__ == "__main__": + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/install.py b/pipenv/vendor/passa/cli/install.py index f47377b1d7..1c0b45914a 100644 --- a/pipenv/vendor/passa/cli/install.py +++ b/pipenv/vendor/passa/cli/install.py @@ -2,62 +2,21 @@ from __future__ import absolute_import, print_function, unicode_literals +from ..actions.install import install from ._base import BaseCommand - - -def main(options): - from passa.internals.lockers import BasicLocker - from passa.operations.lock import lock - - project = options.project - - if not options.check or not project.is_synced(): - locker = BasicLocker(project) - success = lock(locker) - if not success: - return 1 - project._l.write() - print("Written to project at", project.root) - - from passa.internals.synchronizers import Synchronizer - from passa.operations.sync import sync - - syncer = Synchronizer( - project, default=True, develop=options.dev, - clean_unneeded=options.clean, - ) - - success = sync(syncer) - if not success: - return 1 - - print("Synchronized project at", project.root) +from .options import dev, no_check, no_clean class Command(BaseCommand): name = "install" description = "Generate Pipfile.lock to synchronize the environment." 
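The new init action added just above derives Pipfile source entries from pip's
index options; `get_sources()` maps URLs and trusted hosts like so (the URLs
are illustrative):

    from passa.actions.init import get_sources

    sources = get_sources(
        ["https://pypi.org/simple", "https://pkg.example.com/simple"],
        ["https://pkg.example.com/simple"],
    )
    # [{'url': 'https://pypi.org/simple', 'name': 'pypi', 'verify_ssl': True},
    #  {'url': 'https://pkg.example.com/simple', 'name': 'pkg', 'verify_ssl': False}]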
- parsed_main = main - - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "--no-check", dest="check", - action="store_false", default=True, - help="do not check if Pipfile.lock is update, always resolve", - ) - self.parser.add_argument( - "--dev", - action="store_true", - help="install develop packages", - ) - self.parser.add_argument( - "--no-clean", dest="clean", - action="store_false", default=True, - help="do not uninstall packages not specified in Pipfile.lock", - ) + arguments = [no_check, dev, no_clean] + + def run(self, options): + return install(project=options.project, check=options.check, dev=options.dev, + clean=options.clean) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/lock.py b/pipenv/vendor/passa/cli/lock.py index 67b1d118f0..9b0651a173 100644 --- a/pipenv/vendor/passa/cli/lock.py +++ b/pipenv/vendor/passa/cli/lock.py @@ -2,28 +2,17 @@ from __future__ import absolute_import, print_function, unicode_literals +from ..actions.lock import lock from ._base import BaseCommand -def main(options): - from passa.internals.lockers import BasicLocker - from passa.operations.lock import lock - - project = options.project - locker = BasicLocker(project) - success = lock(locker) - if not success: - return - - project._l.write() - print("Written to project at", project.root) - - class Command(BaseCommand): name = "lock" description = "Generate Pipfile.lock." - parsed_main = main + + def run(self, options): + return lock(project=options.project) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/options.py b/pipenv/vendor/passa/cli/options.py new file mode 100644 index 0000000000..da89a3b11b --- /dev/null +++ b/pipenv/vendor/passa/cli/options.py @@ -0,0 +1,153 @@ +# -*- coding=utf-8 -*- +from __future__ import absolute_import + +import argparse +import os +import sys + +import tomlkit.exceptions + +import passa.models.projects +import vistir + + +PYTHON_VERSION = ".".join(str(v) for v in sys.version_info[:2]) + + +class Project(passa.models.projects.Project): + def __init__(self, root, *args, **kwargs): + root = vistir.compat.Path(root).absolute() + pipfile = root.joinpath("Pipfile") + if not pipfile.is_file(): + raise argparse.ArgumentError( + "{0!r} is not a Pipfile project".format(root), + ) + try: + super(Project, self).__init__(root.as_posix(), *args, **kwargs) + except tomlkit.exceptions.ParseError as e: + raise argparse.ArgumentError( + "failed to parse Pipfile: {0!r}".format(str(e)), + ) + + def __name__(self): + return "Project Root" + + +class Option(object): + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + + def add_to_parser(self, parser): + parser.add_argument(*self.args, **self.kwargs) + + def add_to_group(self, group): + group.add_argument(*self.args, **self.kwargs) + + +class ArgumentGroup(object): + def __init__(self, name, parser=None, is_mutually_exclusive=False, required=None, options=[]): + self.name = name + self.options = options + self.parser = parser + self.required = required + self.is_mutually_exclusive = is_mutually_exclusive + self.argument_group = None + + def add_to_parser(self, parser): + group = None + if self.is_mutually_exclusive: + group = parser.add_mutually_exclusive_group(required=self.required) + else: + group = parser.add_argument_group() + for option in self.options: + option.add_to_group(group) + self.argument_group = group + 
self.parser = parser + + +project = Option( + "--project", metavar="project", default=os.getcwd(), type=Project, + help="path to project root (directory containing Pipfile)", +) + +new_project = Option( + "--project", metavar="project", default=os.getcwd(), type=str, + help="path to project root (directory containing Pipfile)", +) + +python_version = Option( + "--py-version", "--python-version", "--requires-python", metavar="python-version", + dest="python_version", default=PYTHON_VERSION, type=str, + help="required minor python version for the project" +) + +packages = Option( + "packages", metavar="package", nargs="*", + help="requirement to add (can be used multiple times)", +) + +editable = Option( + '-e', '--editable', dest='editables', nargs="*", default=[], metavar='path/vcs', + help="editable requirement to add (can be used multiple times)", +) + +dev = Option( + "--dev", action="store_true", default=False, + help="Use [dev-packages] for install/freeze/uninstall operations", +) + +no_sync = Option( + "--no-sync", dest="sync", action="store_false", default=True, + help="do not synchronize the environment", +) + +target = Option( + "-t", "--target", default=None, + help="file to export into (default is to print to stdout)" +) + +no_default = Option( + "--no-default", dest="default", action="store_false", default=True, + help="do not include default packages when exporting, importing, or cleaning" +) + +include_hashes = Option( + "--include-hashes", dest="include_hashes", action="store_true", + help="output hashes in requirements.txt (default is to guess)", +) + +no_include_hashes = Option( + "--no-include-hashes", dest="include_hashes", action="store_false", + help="do not output hashes in requirements.txt (default is to guess)", +) + +no_check = Option( + "--no-check", dest="check", action="store_false", default=True, + help="do not check if Pipfile.lock is up to date, always resolve", +) + +no_clean = Option( + "--no-clean", dest="clean", action="store_false", default=True, + help="do not remove packages not specified in Pipfile.lock", +) + +dev_only = Option( + "--dev", dest="only", action="store_const", const="dev", + help="only try to modify [dev-packages]", +) + +default_only = Option( + "--default", dest="only", action="store_const", const="default", + help="only try to modify [default]", +) + +strategy = Option( + "--strategy", choices=["eager", "only-if-needed"], default="only-if-needed", + help="how dependency upgrading is handled", +) + +include_hashes_group = ArgumentGroup("include_hashes", is_mutually_exclusive=True, options=[include_hashes, no_include_hashes]) +dev_group = ArgumentGroup("dev", is_mutually_exclusive="True", options=[dev_only, default_only]) +package_group = ArgumentGroup("packages", options=[packages, editable, dev, no_sync]) +new_project_group = ArgumentGroup("new-project", options=[new_project, python_version]) diff --git a/pipenv/vendor/passa/cli/remove.py b/pipenv/vendor/passa/cli/remove.py index b1dbfd7cc1..538acbf906 100644 --- a/pipenv/vendor/passa/cli/remove.py +++ b/pipenv/vendor/passa/cli/remove.py @@ -2,74 +2,21 @@ from __future__ import absolute_import, print_function, unicode_literals +from ..actions.remove import remove from ._base import BaseCommand - - -def main(options): - from passa.internals.lockers import PinReuseLocker - from passa.operations.lock import lock - - default = (options.only != "dev") - develop = (options.only != "default") - - project = options.project - project.remove_keys_from_pipfile( - options.packages, 
default=default, develop=develop, - ) - - locker = PinReuseLocker(project) - success = lock(locker) - if not success: - return 1 - - project._p.write() - project._l.write() - print("Written to project at", project.root) - - if not options.clean: - return - - from passa.internals.synchronizers import Cleaner - from passa.operations.sync import clean - - cleaner = Cleaner(project, default=True, develop=True) - success = clean(cleaner) - if not success: - return 1 - - print("Cleaned project at", project.root) +from .options import dev_group, no_clean, packages class Command(BaseCommand): name = "remove" description = "Remove packages from project." - parsed_main = main + arguments = [dev_group, no_clean, packages] - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "packages", metavar="package", - nargs="+", - help="package to remove (can be used multiple times)", - ) - dev_group = self.parser.add_mutually_exclusive_group() - dev_group.add_argument( - "--dev", dest="only", - action="store_const", const="dev", - help="only try to remove from [dev-packages]", - ) - dev_group.add_argument( - "--default", dest="only", - action="store_const", const="default", - help="only try to remove from [packages]", - ) - self.parser.add_argument( - "--no-clean", dest="clean", - action="store_false", default=True, - help="do not uninstall packages not specified in Pipfile.lock", - ) + def run(self, options): + return remove(project=options.project, only=options.only, + packages=options.packages, clean=options.clean) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/sync.py b/pipenv/vendor/passa/cli/sync.py index ade81e0a0f..a09b7842e0 100644 --- a/pipenv/vendor/passa/cli/sync.py +++ b/pipenv/vendor/passa/cli/sync.py @@ -2,45 +2,20 @@ from __future__ import absolute_import, print_function, unicode_literals +from ..actions.sync import sync from ._base import BaseCommand - - -def main(options): - from passa.internals.synchronizers import Synchronizer - from passa.operations.sync import sync - - project = options.project - syncer = Synchronizer( - project, default=True, develop=options.dev, - clean_unneeded=options.clean, - ) - - success = sync(syncer) - if not success: - return 1 - - print("Synchronized project at", project.root) +from .options import dev, no_clean class Command(BaseCommand): name = "sync" description = "Install Pipfile.lock into the environment." 
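+    # Entries in `arguments` are Option/ArgumentGroup objects from .options;
+    # BaseCommand (see _base.py) presumably registers each one on this
+    # command's parser before run() is invoked.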
- parsed_main = main - - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "--dev", - action="store_true", - help="install develop packages", - ) - self.parser.add_argument( - "--no-clean", dest="clean", - action="store_false", default=True, - help="do not uninstall packages not specified in Pipfile.lock", - ) + arguments = [dev, no_clean] + + def run(self, options): + return sync(project=options.project, dev=options.dev, clean=options.clean) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/cli/upgrade.py b/pipenv/vendor/passa/cli/upgrade.py index 011fff6b68..cf7f502106 100644 --- a/pipenv/vendor/passa/cli/upgrade.py +++ b/pipenv/vendor/passa/cli/upgrade.py @@ -1,91 +1,21 @@ # -*- coding=utf-8 -*- - from __future__ import absolute_import, print_function, unicode_literals -import sys - +from ..actions.upgrade import upgrade from ._base import BaseCommand - - -def main(options): - from passa.internals.lockers import EagerUpgradeLocker, PinReuseLocker - from passa.operations.lock import lock - - project = options.project - packages = options.packages - for package in packages: - if not project.contains_key_in_pipfile(package): - print("{package!r} not found in Pipfile".format( - package=package, - ), file=sys.stderr) - return 2 - - project.remove_keys_from_lockfile(packages) - - prev_lockfile = project.lockfile - - if options.strategy == "eager": - locker = EagerUpgradeLocker(project, packages) - else: - locker = PinReuseLocker(project) - success = lock(locker) - if not success: - return 1 - - project._l.write() - print("Written to project at", project.root) - - if not options.sync: - return - - from passa.operations.sync import sync - from passa.internals.synchronizers import Synchronizer - - lockfile_diff = project.difference_lockfile(prev_lockfile) - default = bool(any(lockfile_diff.default)) - develop = bool(any(lockfile_diff.develop)) - - syncer = Synchronizer( - project, default=default, develop=develop, - clean_unneeded=False, - ) - success = sync(syncer) - if not success: - return 1 - - print("Synchronized project at", project.root) +from .options import no_clean, no_sync, packages, strategy class Command(BaseCommand): name = "upgrade" description = "Upgrade packages in project." 
- parsed_main = main + arguments = [packages, strategy, no_clean, no_sync] - def add_arguments(self): - super(Command, self).add_arguments() - self.parser.add_argument( - "packages", metavar="package", - nargs="+", - help="package to upgrade (can be used multiple times)", - ) - self.parser.add_argument( - "--strategy", - choices=["eager", "only-if-needed"], - default="only-if-needed", - help="how dependency upgrading is handled", - ) - self.parser.add_argument( - "--no-sync", dest="sync", - action="store_false", default=True, - help="do not synchronize the environment", - ) - self.parser.add_argument( - "--no-clean", dest="clean", - action="store_false", default=True, - help="do not uninstall packages not specified in Pipfile.lock", - ) + def run(self, options): + return upgrade(project=options.project, strategy=options.strategy, + sync=options.sync, packages=options.packages) if __name__ == "__main__": - Command.run_current_module() + Command.run_parser() diff --git a/pipenv/vendor/passa/internals/_pip.py b/pipenv/vendor/passa/internals/_pip.py index b762971364..2aa143a29b 100644 --- a/pipenv/vendor/passa/internals/_pip.py +++ b/pipenv/vendor/passa/internals/_pip.py @@ -3,19 +3,22 @@ from __future__ import absolute_import, unicode_literals import contextlib +import io +import itertools import distutils.log import os -import setuptools.dist - +import distlib.database import distlib.scripts import distlib.wheel +import packaging.utils import pip_shims +import setuptools.dist import six import vistir +from ..models.caches import CACHE_DIR from ._pip_shims import VCS_SUPPORT, build_wheel as _build_wheel, unpack_url -from .caches import CACHE_DIR from .utils import filter_sources @@ -44,7 +47,6 @@ def _prepare_wheel_building_kwargs(ireq): else: src_dir = vistir.path.create_tracked_tempdir(prefix='passa-src') - # This logic matches pip's behavior, although I don't fully understand the # intention. I guess the idea is to build editables in-place, otherwise out # of the source tree? @@ -128,6 +130,10 @@ def _convert_hashes(values): return hashes +class WheelBuildError(RuntimeError): + pass + + def build_wheel(ireq, sources, hashes=None): """Build a wheel file for the InstallRequirement object. @@ -138,8 +144,8 @@ def build_wheel(ireq, sources, hashes=None): If `hashes` is truthy, it is assumed to be a list of hashes (as formatted in Pipfile.lock) to be checked against the download. - Returns a `distlib.wheel.Wheel` instance. Raises a `RuntimeError` if the - wheel cannot be built. + Returns a `distlib.wheel.Wheel` instance. Raises a `WheelBuildError` (a + `RuntimeError` subclass) if the wheel cannot be built. 
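+
+    A minimal usage sketch (the requirement and index URL below are only
+    illustrative, not taken from this project):
+
+        ireq = pip_shims.InstallRequirement.from_line("six==1.11.0")
+        sources = [{"url": "https://pypi.org/simple", "verify_ssl": True}]
+        wheel = build_wheel(ireq, sources)  # -> distlib.wheel.Wheel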
""" kwargs = _prepare_wheel_building_kwargs(ireq) finder = _get_finder(sources) @@ -172,7 +178,7 @@ def build_wheel(ireq, sources, hashes=None): unpack_url( ireq.link, ireq.source_dir, download_dir, only_download=only_download, session=finder.session, - hashes=ireq.hashes(False), progress_bar=False, + hashes=ireq.hashes(False), progress_bar="off", ) if ireq.is_wheel: @@ -186,7 +192,7 @@ def build_wheel(ireq, sources, hashes=None): finder, _get_wheel_cache(), kwargs, ) if wheel_path is None or not os.path.exists(wheel_path): - raise RuntimeError("failed to build wheel from {}".format(ireq)) + raise WheelBuildError return distlib.wheel.Wheel(wheel_path) @@ -202,7 +208,7 @@ def _obtrain_ref(vcs_obj, src_dir, name, rev=None): def get_vcs_ref(requirement): - backend = VCS_SUPPORT._registry.get(requirement.vcs) + backend = VCS_SUPPORT.get_backend(requirement.vcs) vcs = backend(url=requirement.req.vcs_uri) src = _get_src_dir() name = requirement.normalized_name @@ -215,7 +221,7 @@ def find_installation_candidates(ireq, sources): return finder.find_all_candidates(ireq.name) -class RequirementUninstallation(object): +class RequirementUninstaller(object): """A context manager to remove a package for the inner block. This uses `UninstallPathSet` to control the workflow. If the inner block @@ -243,8 +249,9 @@ def __exit__(self, exc_type, exc_value, traceback): self.pathset.rollback() -def uninstall_requirement(ireq, **kwargs): - return RequirementUninstallation(ireq, **kwargs) +def uninstall(name, **kwargs): + ireq = pip_shims.InstallRequirement.from_line(name) + return RequirementUninstaller(ireq, **kwargs) @contextlib.contextmanager @@ -315,3 +322,76 @@ def prepare(self): def install(self): self.wheel.install(self.paths, distlib.scripts.ScriptMaker(None, None)) + + +def _iter_egg_info_directories(root, name): + name = packaging.utils.canonicalize_name(name) + for parent, dirnames, filenames in os.walk(root): + matched_indexes = [] + for i, dirname in enumerate(dirnames): + if not dirname.lower().endswith("egg-info"): + continue + egg_info_name = packaging.utils.canonicalize_name(dirname[:-9]) + if egg_info_name != name: + continue + matched_indexes.append(i) + yield os.path.join(parent, dirname) + + # Modify dirnames in-place to NOT look into egg-info directories. + # This is a documented behavior in stdlib. + for i in reversed(matched_indexes): + del dirnames[i] + + +def _read_pkg_info(directory): + path = os.path.join(directory, "PKG-INFO") + try: + with io.open(path, encoding="utf-8", errors="replace") as f: + return f.read() + except (IOError, OSError): + return None + + +def _find_egg_info(ireq): + """Find this package's .egg-info directory. + + Due to how sdists are designed, the .egg-info directory cannot be reliably + found without running setup.py to aggregate all configurations. This + function instead uses some heuristics to locate the egg-info directory + that most likely represents this package. + + The best .egg-info directory's path is returned as a string. None is + returned if no matches can be found. + """ + root = ireq.setup_py_dir + + directory_iterator = _iter_egg_info_directories(root, ireq.name) + try: + top_egg_info = next(directory_iterator) + except StopIteration: # No egg-info found. Wat. + return None + directory_iterator = itertools.chain([top_egg_info], directory_iterator) + + # Read the sdist's PKG-INFO to determine which egg_info is best. + pkg_info = _read_pkg_info(root) + + # PKG-INFO not readable. Just return whatever comes first, I guess. 
+ if pkg_info is None: + return top_egg_info + + # Walk the sdist to find the egg-info with matching PKG-INFO. + for directory in directory_iterator: + egg_pkg_info = _read_pkg_info(directory) + if egg_pkg_info == pkg_info: + return directory + + # Nothing matches...? Use the first one we found, I guess. + return top_egg_info + + +def read_sdist_metadata(ireq): + egg_info_dir = _find_egg_info(ireq) + if not egg_info_dir: + return None + distribution = distlib.database.EggInfoDistribution(egg_info_dir) + return distribution.metadata diff --git a/pipenv/vendor/passa/internals/candidates.py b/pipenv/vendor/passa/internals/candidates.py index 1b154bbf04..67b09049c3 100644 --- a/pipenv/vendor/passa/internals/candidates.py +++ b/pipenv/vendor/passa/internals/candidates.py @@ -2,9 +2,6 @@ from __future__ import absolute_import, unicode_literals -import os -import sys - import packaging.specifiers import packaging.version import requirementslib @@ -12,13 +9,17 @@ from ._pip import find_installation_candidates, get_vcs_ref -def _filter_matching_python_requirement(candidates, python_version): +def _filter_matching_python_requirement(candidates, required_python): + # TODO: This should also takes the parent's python_version and + # python_full_version markers, and only return matches with valid + # intersections. For example, if parent requires `python_version >= '3.0'`, + # this should not return entries with "Requires-Python: <3". for c in candidates: try: requires_python = c.requires_python except AttributeError: requires_python = c.location.requires_python - if python_version and requires_python: + if required_python and requires_python: # Old specifications had people setting this to single digits # which is effectively the same as '>=digit,".format(type(self).__name__, self.project.root) @@ -132,7 +149,8 @@ def lock(self): set_metadata( state.mapping, traces, - provider.fetched_dependencies, provider.requires_pythons, + provider.fetched_dependencies, + provider.collected_requires_pythons, ) lockfile = plette.Lockfile.with_meta_from(self.project.pipfile) @@ -153,7 +171,8 @@ class BasicLocker(AbstractLocker): """ def get_provider(self): return BasicProvider( - self.requirements, self.sources, self.allow_prereleases, + self.requirements, self.sources, + self.requires_python, self.allow_prereleases, ) @@ -172,8 +191,8 @@ def __init__(self, project): def get_provider(self): return PinReuseProvider( - self.preferred_pins, - self.requirements, self.sources, self.allow_prereleases, + self.preferred_pins, self.requirements, self.sources, + self.requires_python, self.allow_prereleases, ) @@ -190,5 +209,6 @@ def __init__(self, tracked_names, *args, **kwargs): def get_provider(self): return EagerUpgradeProvider( self.tracked_names, self.preferred_pins, - self.requirements, self.sources, self.allow_prereleases, + self.requirements, self.sources, + self.requires_python, self.allow_prereleases, ) diff --git a/pipenv/vendor/passa/internals/metadata.py b/pipenv/vendor/passa/models/metadata.py similarity index 97% rename from pipenv/vendor/passa/internals/metadata.py rename to pipenv/vendor/passa/models/metadata.py index 9709c5355f..a949f1e91a 100644 --- a/pipenv/vendor/passa/internals/metadata.py +++ b/pipenv/vendor/passa/models/metadata.py @@ -10,8 +10,8 @@ import vistir import vistir.misc -from .markers import get_without_extra -from .specifiers import cleanup_pyspecs, pyspec_from_markers +from ..internals.markers import get_without_extra +from ..internals.specifiers import cleanup_pyspecs, pyspec_from_markers 
def dedup_markers(s): diff --git a/pipenv/vendor/passa/internals/projects.py b/pipenv/vendor/passa/models/projects.py similarity index 96% rename from pipenv/vendor/passa/internals/projects.py rename to pipenv/vendor/passa/models/projects.py index 6a9fcce510..f6e037d651 100644 --- a/pipenv/vendor/passa/internals/projects.py +++ b/pipenv/vendor/passa/models/projects.py @@ -218,8 +218,14 @@ def difference_lockfile(self, lockfile): "develop": SectionDifference({}, {}), } for section_name, section_diff in diff_data.items(): - this = self.lockfile[section_name]._data - that = lockfile[section_name]._data + try: + this = self.lockfile[section_name]._data + except (KeyError, TypeError): + this = {} + try: + that = lockfile[section_name]._data + except (KeyError, TypeError): + that = {} for key, this_value in this.items(): try: that_value = that[key] diff --git a/pipenv/vendor/passa/internals/providers.py b/pipenv/vendor/passa/models/providers.py similarity index 92% rename from pipenv/vendor/passa/internals/providers.py rename to pipenv/vendor/passa/models/providers.py index 1c062af71b..36b2f2ea91 100644 --- a/pipenv/vendor/passa/internals/providers.py +++ b/pipenv/vendor/passa/models/providers.py @@ -6,11 +6,10 @@ import resolvelib -from .candidates import find_candidates -from .dependencies import get_dependencies -from .utils import ( - filter_sources, get_allow_prereleases, - are_requirements_equal, identify_requirment, strip_extras, +from ..internals.candidates import find_candidates +from ..internals.dependencies import get_dependencies +from ..internals.utils import ( + filter_sources, get_allow_prereleases, identify_requirment, strip_extras, ) @@ -20,8 +19,10 @@ class BasicProvider(resolvelib.AbstractProvider): """Provider implementation to interface with `requirementslib.Requirement`. """ - def __init__(self, root_requirements, sources, allow_prereleases): + def __init__(self, root_requirements, sources, + requires_python, allow_prereleases): self.sources = sources + self.requires_python = requires_python self.allow_prereleases = bool(allow_prereleases) self.invalid_candidates = set() @@ -33,9 +34,9 @@ def __init__(self, root_requirements, sources, allow_prereleases): self.fetched_dependencies = {None: { self.identify(r): r for r in root_requirements }} - # TODO: Find a way to resolve with multiple versions (by tricking - # runtime) Include multiple keys in pipfiles? - self.requires_pythons = {None: ""} # TODO: Don't use any value + + # Should Pipfile's requires.python_[full_]version be included? 
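+        # Maps each candidate's identify() key to the Requires-Python
+        # specifier collected from its metadata; the None key stands for
+        # the root requirement set.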
+ self.collected_requires_pythons = {None: ""} def identify(self, dependency): return identify_requirment(dependency) @@ -46,8 +47,9 @@ def get_preference(self, resolution, candidates, information): return len(candidates) def find_matches(self, requirement): + sources = filter_sources(requirement, self.sources) candidates = find_candidates( - requirement, filter_sources(requirement, self.sources), + requirement, sources, self.requires_python, get_allow_prereleases(requirement, self.allow_prereleases), ) return candidates @@ -118,7 +120,7 @@ def get_dependencies(self, candidate): self.fetched_dependencies[candidate_key] = { self.identify(r): r for r in dependencies } - self.requires_pythons[candidate_key] = requires_python + self.collected_requires_pythons[candidate_key] = requires_python return dependencies diff --git a/pipenv/vendor/passa/internals/synchronizers.py b/pipenv/vendor/passa/models/synchronizers.py similarity index 91% rename from pipenv/vendor/passa/internals/synchronizers.py rename to pipenv/vendor/passa/models/synchronizers.py index 2a1f1d18b2..bad4905241 100644 --- a/pipenv/vendor/passa/internals/synchronizers.py +++ b/pipenv/vendor/passa/models/synchronizers.py @@ -14,7 +14,7 @@ import packaging.version import requirementslib -from ._pip import uninstall_requirement, EditableInstaller, WheelInstaller +from ..internals._pip import uninstall, EditableInstaller, WheelInstaller def _is_installation_local(name): @@ -23,8 +23,9 @@ def _is_installation_local(name): This is used to distinguish packages seen by a virtual environment. A venv may be able to see global packages, but we don't want to mess with them. """ - location = pkg_resources.working_set.by_key[name].location - return os.path.commonprefix([location, sys.prefix]) == sys.prefix + loc = os.path.normcase(pkg_resources.working_set.by_key[name].location) + pre = os.path.normcase(sys.prefix) + return os.path.commonprefix([loc, pre]) == pre def _is_up_to_date(distro, version): @@ -76,11 +77,10 @@ def _group_installed_names(packages): @contextlib.contextmanager def _remove_package(name): if name is None or not _is_installation_local(name): - yield + yield None return - r = requirementslib.Requirement.from_line(name) - with uninstall_requirement(r.as_ireq(), auto_confirm=True, verbose=False): - yield + with uninstall(name, auto_confirm=True, verbose=False) as uninstaller: + yield uninstaller def _get_packages(lockfile, default, develop): @@ -109,7 +109,7 @@ def _build_paths(): } -PROTECTED_FROM_CLEAN = {"setuptools", "pip"} +PROTECTED_FROM_CLEAN = {"setuptools", "pip", "wheel"} def _clean(names): @@ -117,9 +117,9 @@ def _clean(names): for name in names: if name in PROTECTED_FROM_CLEAN: continue - with _remove_package(name): - pass - cleaned.add(name) + with _remove_package(name) as uninst: + if uninst: + cleaned.add(name) return cleaned @@ -210,5 +210,5 @@ def __repr__(self): def clean(self): groupcoll = _group_installed_names(self.packages) - _clean(groupcoll.unneeded) - return groupcoll.unneeded + cleaned = _clean(groupcoll.unneeded) + return cleaned diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py index acaeee25f6..8cb2bca117 100644 --- a/pipenv/vendor/pip_shims/__init__.py +++ b/pipenv/vendor/pip_shims/__init__.py @@ -1,79 +1,24 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import -__version__ = '0.1.2' +import sys -__all__ = [ - "__version__", - "_strip_extras", - "cmdoptions", - "Command", - "ConfigOptionParser", - "DistributionNotFound", - "FAVORITE_HASH", - 
"FormatControl", - "get_installed_distributions", - "index_group", - "InstallRequirement", - "is_archive_file", - "is_file_url", - "is_installable_dir", - "unpack_url", - "Link", - "make_abstract_dist", - "make_option_group", - "PackageFinder", - "parse_requirements", - "parse_version", - "path_to_url", - "pip_version", - "PipError", - "RequirementPreparer", - "RequirementSet", - "RequirementTracker", - "Resolver", - "SafeFileCache", - "url_to_path", - "USER_CACHE_DIR", - "VcsSupport", - "Wheel", - "WheelCache", - "WheelBuilder" -] +__version__ = '0.3.0' -from .shims import ( - _strip_extras, - cmdoptions, - Command, - ConfigOptionParser, - DistributionNotFound, - FAVORITE_HASH, - FormatControl, - get_installed_distributions, - index_group, - InstallRequirement, - is_archive_file, - is_file_url, - unpack_url, - is_installable_dir, - Link, - make_abstract_dist, - make_option_group, - PackageFinder, - parse_requirements, - parse_version, - path_to_url, - pip_version, - PipError, - RequirementPreparer, - RequirementSet, - RequirementTracker, - Resolver, - SafeFileCache, - url_to_path, - USER_CACHE_DIR, - VcsSupport, - Wheel, - WheelCache, - WheelBuilder, -) +from . import shims + + +old_module = sys.modules["pip_shims"] + + +module = sys.modules["pip_shims"] = shims._new() +module.shims = shims +module.__dict__.update({ + '__file__': __file__, + '__package__': "pip_shims", + '__path__': __path__, + '__doc__': __doc__, + '__all__': module.__all__ + ['shims',], + '__version__': __version__, + '__name__': __name__ +}) diff --git a/pipenv/vendor/pip_shims/shims.py b/pipenv/vendor/pip_shims/shims.py index 5fc58beb1d..656ff7f58c 100644 --- a/pipenv/vendor/pip_shims/shims.py +++ b/pipenv/vendor/pip_shims/shims.py @@ -1,163 +1,326 @@ # -*- coding=utf-8 -*- from collections import namedtuple from contextlib import contextmanager -from .utils import _parse, get_package, STRING_TYPES import importlib import os -from pipenv.patched.notpip import __version__ as pip_version import sys -has_modutil = False -if sys.version_info[:2] >= (3, 7): - try: - import modutil - except ImportError: - has_modutil = False - else: - has_modutil = True - - -BASE_IMPORT_PATH = os.environ.get("PIP_SHIMS_BASE_MODULE", "pip") -path_info = namedtuple("PathInfo", "path start_version end_version") -parsed_pip_version = _parse(pip_version) - - -def is_valid(path_info_tuple): - if ( - path_info_tuple.start_version <= parsed_pip_version - and path_info_tuple.end_version >= parsed_pip_version - ): - return 1 - return 0 - - -def get_ordered_paths(module_paths, base_path): - if not isinstance(module_paths, list): - module_paths = [module_paths] - prefix_order = [pth.format(base_path) for pth in ["{0}._internal", "{0}"]] - if _parse(pip_version) < _parse("10.0.0"): - prefix_order = reversed(prefix_order) - paths = sorted(module_paths, key=is_valid, reverse=True) - search_order = [ - "{0}.{1}".format(p, pth.path) - for p in prefix_order - for pth in paths - if pth is not None - ] - return search_order - - -def do_import(module_paths, base_path=BASE_IMPORT_PATH): - search_order = get_ordered_paths(module_paths, base_path) - imported = None - if has_modutil: - pkgs = [get_package(pkg) for pkg in search_order] - imports = [ - modutil.lazy_import(__name__, {to_import}) for to_import, pkg in pkgs - ] - imp_getattrs = [imp_getattr for mod, imp_getattr in imports] - chained = modutil.chained___getattr__(__name__, *imp_getattrs) +import six +six.add_move(six.MovedAttribute("Callable", "collections", "collections.abc")) +from six.moves import 
Callable + + +class _shims(object): + CURRENT_PIP_VERSION = "18.1" + BASE_IMPORT_PATH = os.environ.get("PIP_SHIMS_BASE_MODULE", "pip") + path_info = namedtuple("PathInfo", "path start_version end_version") + + def __dir__(self): + result = list(self._locations.keys()) + list(self.__dict__.keys()) + result.extend(('__file__', '__doc__', '__all__', + '__docformat__', '__name__', '__path__', + '__package__', '__version__')) + return result + + @classmethod + def _new(cls): + return cls() + + @property + def __all__(self): + return list(self._locations.keys()) + + def __init__(self): + from .utils import _parse, get_package, STRING_TYPES + self._parse = _parse + self.get_package = get_package + self.STRING_TYPES = STRING_TYPES + self._modules = { + "pip": importlib.import_module("pip"), + } + self.pip_version = getattr(self._modules["pip"], "__version__") + self.parsed_pip_version = self._parse(self.pip_version) + self._contextmanagers = ("RequirementTracker",) + self._moves = { + "InstallRequirement": { + "from_editable": "install_req_from_editable", + "from_line": "install_req_from_line", + } + } + self._locations = { + "parse_version": ("index.parse_version", "7", "9999"), + "_strip_extras": ( + ("req.req_install._strip_extras", "7", "18.0"), + ("req.constructors._strip_extras", "18.1", "9999"), + ), + "cmdoptions": ( + ("cli.cmdoptions", "18.1", "9999"), + ("cmdoptions", "7.0.0", "18.0") + ), + "Command": ( + ("cli.base_command.Command", "18.1", "9999"), + ("basecommand.Command", "7.0.0", "18.0") + ), + "ConfigOptionParser": ( + ("cli.parser.ConfigOptionParser", "18.1", "9999"), + ("baseparser.ConfigOptionParser", "7.0.0", "18.0") + ), + "DistributionNotFound": ("exceptions.DistributionNotFound", "7.0.0", "9999"), + "FAVORITE_HASH": ("utils.hashes.FAVORITE_HASH", "7.0.0", "9999"), + "FormatControl": ( + ("models.format_control.FormatControl", "18.1", "9999"), + ("index.FormatControl", "7.0.0", "18.0"), + ), + "FrozenRequirement": ( + ("FrozenRequirement", "7.0.0", "9.0.3"), + ("operations.freeze.FrozenRequirement", "10.0.0", "9999") + ), + "get_installed_distributions": ( + ("utils.misc.get_installed_distributions", "10", "9999"), + ("utils.get_installed_distributions", "7", "9.0.3") + ), + "index_group": ( + ("cli.cmdoptions.index_group", "18.1", "9999"), + ("cmdoptions.index_group", "7.0.0", "18.0") + ), + "InstallRequirement": ("req.req_install.InstallRequirement", "7.0.0", "9999"), + "install_req_from_editable": ( + ("req.constructors.install_req_from_editable", "18.1", "9999"), + ("req.req_install.InstallRequirement.from_editable", "7.0.0", "18.0") + ), + "install_req_from_line": ( + ("req.constructors.install_req_from_line", "18.1", "9999"), + ("req.req_install.InstallRequirement.from_line", "7.0.0", "18.0") + ), + "is_archive_file": ("download.is_archive_file", "7.0.0", "9999"), + "is_file_url": ("download.is_file_url", "7.0.0", "9999"), + "unpack_url": ("download.unpack_url", "7.0.0", "9999"), + "is_installable_dir": ( + ("utils.misc.is_installable_dir", "10.0.0", "9999"), + ("utils.is_installable_dir", "7.0.0", "9.0.3") + ), + "Link": ("index.Link", "7.0.0", "9999"), + "make_abstract_dist": ( + ("operations.prepare.make_abstract_dist", "10.0.0", "9999"), + ("req.req_set.make_abstract_dist", "7.0.0", "9.0.3") + ), + "make_option_group": ( + ("cli.cmdoptions.make_option_group", "18.1", "9999"), + ("cmdoptions.make_option_group", "7.0.0", "18.0") + ), + "PackageFinder": ("index.PackageFinder", "7.0.0", "9999"), + "parse_requirements": ("req.req_file.parse_requirements", "7.0.0", 
"9999"), + "parse_version": ("index.parse_version", "7.0.0", "9999"), + "path_to_url": ("download.path_to_url", "7.0.0", "9999"), + "PipError": ("exceptions.PipError", "7.0.0", "9999"), + "RequirementPreparer": ("operations.prepare.RequirementPreparer", "7", "9999"), + "RequirementSet": ("req.req_set.RequirementSet", "7.0.0", "9999"), + "RequirementTracker": ("req.req_tracker.RequirementTracker", "7.0.0", "9999"), + "Resolver": ("resolve.Resolver", "7.0.0", "9999"), + "SafeFileCache": ("download.SafeFileCache", "7.0.0", "9999"), + "UninstallPathSet": ("req.req_uninstall.UninstallPathSet", "7.0.0", "9999"), + "url_to_path": ("download.url_to_path", "7.0.0", "9999"), + "USER_CACHE_DIR": ("locations.USER_CACHE_DIR", "7.0.0", "9999"), + "VcsSupport": ("vcs.VcsSupport", "7.0.0", "9999"), + "Wheel": ("wheel.Wheel", "7.0.0", "9999"), + "WheelCache": ( + ("cache.WheelCache", "10.0.0", "9999"), + ("wheel.WheelCache", "7", "9.0.3") + ), + "WheelBuilder": ("wheel.WheelBuilder", "7.0.0", "9999"), + } + + def _ensure_methods(self, cls, classname, *methods): + method_names = [m[0] for m in methods] + if all(getattr(cls, m, None) for m in method_names): + return cls + new_functions = {} + class BaseFunc(Callable): + def __init__(self, func_base, name, *args, **kwargs): + self.func = func_base + self.__name__ = self.__qualname__ = name + + def __call__(self, cls, *args, **kwargs): + return self.func(*args, **kwargs) + + for method_name, fn in methods: + new_functions[method_name] = classmethod(BaseFunc(fn, method_name)) + if six.PY2: + classname = classname.encode(sys.getdefaultencoding()) + type_ = type( + classname, + (cls,), + new_functions + ) + return type_ + + def _get_module_paths(self, module, base_path=None): + if not base_path: + base_path = self.BASE_IMPORT_PATH + module = self._locations[module] + if not isinstance(next(iter(module)), (tuple, list)): + module_paths = self.get_pathinfo(module) + else: + module_paths = [self.get_pathinfo(pth) for pth in module] + return self.sort_paths(module_paths, base_path) + + def _get_remapped_methods(self, moved_package): + original_base, original_target = moved_package + original_import = self._import(self._locations[original_target]) + old_to_new = {} + new_to_old = {} + for method_name, new_method_name in self._moves.get(original_target, {}).items(): + module_paths = self._get_module_paths(new_method_name) + target = next(iter( + sorted(set([ + tgt for mod, tgt in map(self.get_package, module_paths) + ]))), None + ) + old_to_new[method_name] = { + "target": target, + "name": new_method_name, + "location": self._locations[new_method_name], + "module": self._import(self._locations[new_method_name]) + } + new_to_old[new_method_name] = { + "target": original_target, + "name": method_name, + "location": self._locations[original_target], + "module": original_import + } + return (old_to_new, new_to_old) + + def _import_moved_module(self, moved_package): + old_to_new, new_to_old = self._get_remapped_methods(moved_package) imported = None - for to_import, pkg in pkgs: - _, _, module_name = to_import.rpartition(".") - try: - imported = chained(module_name) - except (modutil.ModuleAttributeError, ImportError): - continue + method_map = [] + new_target = None + for old_method, remapped in old_to_new.items(): + new_name = remapped["name"] + new_target = new_to_old[new_name]["target"] + if not imported: + imported = self._modules[new_target] = new_to_old[new_name]["module"] + method_map.append((old_method, remapped["module"])) + if getattr(imported, "__class__", "") 
== type: + imported = self._ensure_methods( + imported, new_target, *method_map + ) + self._modules[new_target] = imported + if imported: + return imported + return + + def _check_moved_methods(self, search_pth, moves): + module_paths = [ + self.get_package(pth) for pth in self._get_module_paths(search_pth) + ] + moved_methods = [ + (base, target_cls) for base, target_cls + in module_paths if target_cls in moves + ] + return next(iter(moved_methods), None) + + def __getattr__(self, *args, **kwargs): + locations = super(_shims, self).__getattribute__("_locations") + contextmanagers = super(_shims, self).__getattribute__("_contextmanagers") + moves = super(_shims, self).__getattribute__("_moves") + if args[0] in locations: + moved_package = self._check_moved_methods(args[0], moves) + if moved_package: + imported = self._import_moved_module(moved_package) + if imported: + return imported else: - if not imported: - continue - return getattr(imported, pkg) - if not imported: - return - return imported - for to_import in search_order: - to_import, package = get_package(to_import) + imported = self._import(locations[args[0]]) + if not imported and args[0] in contextmanagers: + return self.nullcontext + return imported + return super(_shims, self).__getattribute__(*args, **kwargs) + + def is_valid(self, path_info_tuple): + if ( + path_info_tuple.start_version <= self.parsed_pip_version + and path_info_tuple.end_version >= self.parsed_pip_version + ): + return 1 + return 0 + + def sort_paths(self, module_paths, base_path): + if not isinstance(module_paths, list): + module_paths = [module_paths] + prefix_order = [pth.format(base_path) for pth in ["{0}._internal", "{0}"]] + # Pip 10 introduced the internal api division + if self._parse(self.pip_version) < self._parse("10.0.0"): + prefix_order = reversed(prefix_order) + paths = sorted(module_paths, key=self.is_valid, reverse=True) + search_order = [ + "{0}.{1}".format(p, pth.path) + for p in prefix_order + for pth in paths + if pth is not None + ] + return search_order + + def import_module(self, module): + if module in self._modules: + return self._modules[module] try: - imported = importlib.import_module(to_import) + imported = importlib.import_module(module) except ImportError: - continue + imported = None else: - return getattr(imported, package) - return imported - - -def pip_import(import_name, *module_paths): - paths = [] - for pip_path in module_paths: - if not isinstance(pip_path, (list, tuple)): - module_path, start_version, end_version = module_paths - new_path = path_info(module_path, _parse(start_version), _parse(end_version)) - paths.append(new_path) - break + self._modules[module] = imported + return imported + + def none_or_ctxmanager(self, pkg_name): + if pkg_name in self._contextmanagers: + return self.nullcontext + return None + + def get_package_from_modules(self, modules): + modules = [ + (package_name, self.import_module(m)) + for m, package_name in map(self.get_package, modules) + ] + imports = [ + getattr(m, pkg, self.none_or_ctxmanager(pkg)) for pkg, m in modules + if m is not None + ] + return next(iter(imports), None) + + def _import(self, module_paths, base_path=None): + if not base_path: + base_path = self.BASE_IMPORT_PATH + if not isinstance(next(iter(module_paths)), (tuple, list)): + module_paths = self.get_pathinfo(module_paths) else: - module_path, start_version, end_version = pip_path - paths.append(path_info(module_path, _parse(start_version), _parse(end_version))) - return do_import(paths) - - -parse_version = 
pip_import("parse_version", "index.parse_version", "7", "9999") -_strip_extras = pip_import("_strip_extras", "req.req_install._strip_extras", "7", "9999") -cmdoptions = pip_import( - "", ("cli.cmdoptions", "18.1", "9999"), ("cmdoptions", "7.0.0", "18.0"), -) -Command = pip_import("Command", - ("cli.base_command.Command", "18.1", "9999"), - ("basecommand.Command", "7.0.0", "18.0"), -) -ConfigOptionParser = pip_import("ConfigOptionParser", - ("cli.parser.ConfigOptionParser", "18.1", "9999"), - ("baseparser.ConfigOptionParser", "7.0.0", "18.0"), -) -DistributionNotFound = pip_import("DistributionNotFound", "exceptions.DistributionNotFound", "7.0.0", "9999") -FAVORITE_HASH = pip_import("FAVORITE_HASH", "utils.hashes.FAVORITE_HASH", "7.0.0", "9999") -FormatControl = pip_import("FormatControl", "index.FormatControl", "7.0.0", "9999") -get_installed_distributions = pip_import("get_installed_distributions", - ("utils.misc.get_installed_distributions", "10", "9999"), - ("utils.get_installed_distributions", "7", "9.0.3") -) -index_group = pip_import("index_group", - ("cli.cmdoptions.index_group", "18.1", "9999"), - ("cmdoptions.index_group", "7.0.0", "18.0"), -) -InstallRequirement = pip_import("InstallRequirement", "req.req_install.InstallRequirement", "7.0.0", "9999") -is_archive_file = pip_import("is_archive_file", "download.is_archive_file", "7.0.0", "9999") -is_file_url = pip_import("is_file_url", "download.is_file_url", "7.0.0", "9999") -unpack_url = pip_import("unpack_url", "download.unpack_url", "7.0.0", "9999") -is_installable_dir = pip_import("is_installable_dir", - ("utils.misc.is_installable_dir", "10.0.0", "9999"), - ("utils.is_installable_dir", "7.0.0", "9.0.3"), -) -Link = pip_import("Link", "index.Link", "7.0.0", "9999") -make_abstract_dist = pip_import("make_abstract_dist", - ("operations.prepare.make_abstract_dist", "10.0.0", "9999"), - ("req.req_set.make_abstract_dist", "7.0.0", "9.0.3"), -) -make_option_group = pip_import("make_option_group", - ("cli.cmdoptions.make_option_group", "18.1", "9999"), - ("cmdoptions.make_option_group", "7.0.0", "18.0"), -) -PackageFinder = pip_import("PackageFinder", "index.PackageFinder", "7.0.0", "9999") -parse_requirements = pip_import("parse_requirements", "req.req_file.parse_requirements", "7.0.0", "9999") -parse_version = pip_import("parse_version", "index.parse_version", "7.0.0", "9999") -path_to_url = pip_import("path_to_url", "download.path_to_url", "7.0.0", "9999") -PipError = pip_import("PipError", "exceptions.PipError", "7.0.0", "9999") -RequirementPreparer = pip_import("RequirementPreparer", "operations.prepare.RequirementPreparer", "7", "9999") -RequirementSet = pip_import("RequirementSet", "req.req_set.RequirementSet", "7.0.0", "9999") -RequirementTracker = pip_import("RequirementTracker", "req.req_tracker.RequirementTracker", "7.0.0", "9999") -Resolver = pip_import("Resolver", "resolve.Resolver", "7.0.0", "9999") -SafeFileCache = pip_import("SafeFileCache", "download.SafeFileCache", "7.0.0", "9999") -url_to_path = pip_import("url_to_path", "download.url_to_path", "7.0.0", "9999") -USER_CACHE_DIR = pip_import("USER_CACHE_DIR", "locations.USER_CACHE_DIR", "7.0.0", "9999") -VcsSupport = pip_import("VcsSupport", "vcs.VcsSupport", "7.0.0", "9999") -Wheel = pip_import("Wheel", "wheel.Wheel", "7.0.0", "9999") -WheelCache = pip_import("WheelCache", ("cache.WheelCache", "10.0.0", "9999"), ("wheel.WheelCache", "7", "9.0.3")) -WheelBuilder = pip_import("WheelBuilder", "wheel.WheelBuilder", "7.0.0", "9999") - - -if not RequirementTracker: + 
module_paths = [self.get_pathinfo(pth) for pth in module_paths] + search_order = self.sort_paths(module_paths, base_path) + return self.get_package_from_modules(search_order) + + def do_import(self, *args, **kwargs): + return self._import(*args, **kwargs) @contextmanager - def RequirementTracker(): - yield + def nullcontext(self, *args, **kwargs): + try: + yield + finally: + pass + + def get_pathinfo(self, module_path): + assert isinstance(module_path, (list, tuple)) + module_path, start_version, end_version = module_path + return self.path_info(module_path, self._parse(start_version), self._parse(end_version)) + + +old_module = sys.modules[__name__] if __name__ in sys.modules else None +module = sys.modules[__name__] = _shims() +module.__dict__.update({ + '__file__': __file__, + '__package__': __package__, + '__doc__': __doc__, + '__all__': module.__all__, + '__name__': __name__, +}) diff --git a/pipenv/vendor/plette/__init__.py b/pipenv/vendor/plette/__init__.py index c99c1bc1c1..8099f0b1ba 100644 --- a/pipenv/vendor/plette/__init__.py +++ b/pipenv/vendor/plette/__init__.py @@ -3,7 +3,7 @@ "Lockfile", "Pipfile", ] -__version__ = '0.1.1' +__version__ = '0.2.2' from .lockfiles import Lockfile from .pipfiles import Pipfile diff --git a/pipenv/vendor/plette/lockfiles.py b/pipenv/vendor/plette/lockfiles.py index fe97a52149..10df07e1cb 100644 --- a/pipenv/vendor/plette/lockfiles.py +++ b/pipenv/vendor/plette/lockfiles.py @@ -1,6 +1,12 @@ from __future__ import unicode_literals import json +import numbers + +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc import six @@ -44,6 +50,20 @@ def iterencode(self, obj): PIPFILE_SPEC_CURRENT = 6 +def _copy_jsonsafe(value): + """Deep-copy a value into JSON-safe types. + """ + if isinstance(value, six.string_types + (numbers.Number,)): + return value + if isinstance(value, collections_abc.Mapping): + return {six.text_type(k): _copy_jsonsafe(v) for k, v in value.items()} + if isinstance(value, collections_abc.Iterable): + return [_copy_jsonsafe(v) for v in value] + if value is None: # This doesn't happen often for us. + return None + return six.text_type(value) + + class Lockfile(DataView): """Representation of a Pipfile.lock. """ @@ -71,10 +91,10 @@ def load(cls, f, encoding=None): def with_meta_from(cls, pipfile): data = { "_meta": { - "hash": pipfile.get_hash()._data, + "hash": _copy_jsonsafe(pipfile.get_hash()._data), "pipfile-spec": PIPFILE_SPEC_CURRENT, - "requires": pipfile._data.get("requires", {}).copy(), - "sources": pipfile.sources._data.copy(), + "requires": _copy_jsonsafe(pipfile._data.get("requires", {})), + "sources": _copy_jsonsafe(pipfile.sources._data), }, "default": {}, "develop": {}, diff --git a/pipenv/vendor/plette/models/base.py b/pipenv/vendor/plette/models/base.py index e8bbd4faf7..d70752eec0 100644 --- a/pipenv/vendor/plette/models/base.py +++ b/pipenv/vendor/plette/models/base.py @@ -31,8 +31,7 @@ class DataView(object): """A "view" to a data. Validates the input mapping on creation. A subclass is expected to - provide a `__SCHEMA__` class attribute specifying a validator schema, - or a concrete Cerberus validator object. + provide a `__SCHEMA__` class attribute specifying a validator schema. 
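+
+    A minimal illustrative subclass (the schema shown is hypothetical, not
+    one of plette's real models):
+
+        class Point(DataView):
+            __SCHEMA__ = {
+                "x": {"type": "integer", "required": True},
+                "y": {"type": "integer", "required": True},
+            }
+
+        Point({"x": 1, "y": 2})["x"]  # -> 1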
""" def __init__(self, data): self.validate(data) @@ -54,6 +53,9 @@ def __getitem__(self, key): def __setitem__(self, key, value): self._data[key] = value + def __delitem__(self, key): + del self._data[key] + def get(self, key, default=None): try: return self[key] @@ -66,11 +68,12 @@ def validate(cls, data): class DataViewCollection(DataView): - """A collection of dataview. + """A homogeneous collection of data views. Subclasses are expected to assign a class attribute `item_class` to specify - how items should be coerced when accessed. The item class should conform to - the `DataView` protocol. + the type of items it contains. This class will be used to coerce return + values when accessed. The item class should conform to the `DataView` + protocol. You should not instantiate an instance from this class, but from one of its subclasses instead. @@ -87,7 +90,7 @@ def __getitem__(self, key): return self.item_class(self._data[key]) def __setitem__(self, key, value): - if isinstance(value, self.item_class): + if isinstance(value, DataView): value = value._data self._data[key] = value @@ -96,7 +99,7 @@ def __delitem__(self, key): class DataViewMapping(DataViewCollection): - """A mapping of dataview. + """A mapping of data views. The keys are primitive values, while values are instances of `item_class`. """ @@ -119,7 +122,7 @@ def items(self): class DataViewSequence(DataViewCollection): - """A sequence of dataview. + """A sequence of data views. Each entry is an instance of `item_class`. """ @@ -130,3 +133,13 @@ def validate(cls, data): def __iter__(self): return (self.item_class(d) for d in self._data) + + def __getitem__(self, key): + if isinstance(key, slice): + return type(self)(self._data[key]) + return super(DataViewSequence, self).__getitem__(key) + + def append(self, value): + if isinstance(value, DataView): + value = value._data + self._data.append(value) diff --git a/pipenv/vendor/pyparsing.py b/pipenv/vendor/pyparsing.py index 69be39f6ac..cf38419bcb 100644 --- a/pipenv/vendor/pyparsing.py +++ b/pipenv/vendor/pyparsing.py @@ -1,6 +1,6 @@ # module pyparsing.py # -# Copyright (c) 2003-2016 Paul T. McGuire +# Copyright (c) 2003-2018 Paul T. McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -25,6 +25,7 @@ __doc__ = \ """ pyparsing module - Classes and methods to define and execute parsing grammars +============================================================================= The pyparsing module is an alternative approach to creating and executing simple grammars, vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you @@ -58,10 +59,23 @@ class names, and the use of '+', '|' and '^' operators. - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - quoted strings - embedded comments + + +Getting Started - +----------------- +Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing +classes inherit from. 
Use the docstrings for examples of how to: + - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes + - construct character word-group expressions using the L{Word} class + - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes + - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones + - associate names with your parsed results using L{ParserElement.setResultsName} + - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} + - find more useful common expressions in the L{pyparsing_common} namespace class """ -__version__ = "2.2.0" -__versionTime__ = "06 Mar 2017 02:06 UTC" +__version__ = "2.2.2" +__versionTime__ = "29 Sep 2018 15:58 UTC" __author__ = "Paul McGuire " import string @@ -82,6 +96,15 @@ class names, and the use of '+', '|' and '^' operators. except ImportError: from threading import RLock +try: + # Python 3 + from collections.abc import Iterable + from collections.abc import MutableMapping +except ImportError: + # Python 2.7 + from collections import Iterable + from collections import MutableMapping + try: from collections import OrderedDict as _OrderedDict except ImportError: @@ -940,7 +963,7 @@ def __getnewargs__(self): def __dir__(self): return (dir(type(self)) + list(self.keys())) -collections.MutableMapping.register(ParseResults) +MutableMapping.register(ParseResults) def col (loc,strg): """Returns current column within a string, counting newlines as line separators. @@ -1025,11 +1048,11 @@ def extract_stack(limit=0): # special handling for Python 3.5.0 - extra deep call stack by 1 offset = -3 if system_version == (3,5,0) else -2 frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] - return [(frame_summary.filename, frame_summary.lineno)] + return [frame_summary[:2]] def extract_tb(tb, limit=0): frames = traceback.extract_tb(tb, limit=limit) frame_summary = frames[-1] - return [(frame_summary.filename, frame_summary.lineno)] + return [frame_summary[:2]] else: extract_stack = traceback.extract_stack extract_tb = traceback.extract_tb @@ -1374,7 +1397,7 @@ def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): else: preloc = loc tokensStart = preloc - if self.mayIndexError or loc >= len(instring): + if self.mayIndexError or preloc >= len(instring): try: loc,tokens = self.parseImpl( instring, preloc, doActions ) except IndexError: @@ -1408,7 +1431,6 @@ def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): self.resultsName, asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), modal=self.modalResults ) - if debugging: #~ print ("Matched",self,"->",retTokens.asList()) if (self.debugActions[1] ): @@ -2754,7 +2776,7 @@ class Regex(Token): roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") """ compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0): + def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False): """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. 
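+        If C{asGroupList} is true, the parsed token is the tuple of re match groups
+        (as from C{MatchObject.groups()}); if C{asMatch} is true, it is the C{re}
+        match object itself.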
See the Python C{re} module for an explanation of the acceptable patterns and flags.""" super(Regex,self).__init__() @@ -2787,6 +2809,8 @@ def __init__( self, pattern, flags=0): self.errmsg = "Expected " + self.name self.mayIndexError = False self.mayReturnEmpty = True + self.asGroupList = asGroupList + self.asMatch = asMatch def parseImpl( self, instring, loc, doActions=True ): result = self.re.match(instring,loc) @@ -2795,10 +2819,15 @@ def parseImpl( self, instring, loc, doActions=True ): loc = result.end() d = result.groupdict() - ret = ParseResults(result.group()) - if d: - for k in d: - ret[k] = d[k] + if self.asMatch: + ret = result + elif self.asGroupList: + ret = result.groups() + else: + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] return loc,ret def __str__( self ): @@ -2812,6 +2841,28 @@ def __str__( self ): return self.strRepr + def sub(self, repl): + """ + Return Regex with an attached parse action to transform the parsed + result as if called using C{re.sub(expr, repl, string)}. + """ + if self.asGroupList: + warnings.warn("cannot use sub() with Regex(asGroupList=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch and callable(repl): + warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch: + def pa(tokens): + return tokens[0].expand(repl) + else: + def pa(tokens): + return self.re.sub(repl, tokens[0]) + return self.addParseAction(pa) class QuotedString(Token): r""" @@ -3242,7 +3293,7 @@ def __init__( self, exprs, savelist = False ): if isinstance( exprs, basestring ): self.exprs = [ ParserElement._literalStringClass( exprs ) ] - elif isinstance( exprs, collections.Iterable ): + elif isinstance( exprs, Iterable ): exprs = list(exprs) # if sequence of strings provided, wrap with Literal if all(isinstance(expr, basestring) for expr in exprs): @@ -4062,7 +4113,7 @@ def __init__( self, other, include=False, ignore=None, failOn=None ): self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include - self.asList = False + self.saveAsList = False if isinstance(failOn, basestring): self.failOn = ParserElement._literalStringClass(failOn) else: @@ -4393,7 +4444,7 @@ def traceParseAction(f): @traceParseAction def remove_duplicate_chars(tokens): - return ''.join(sorted(set(''.join(tokens))) + return ''.join(sorted(set(''.join(tokens)))) wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) print(wds.parseString("slkdjs sld sldd sdlf sdljf")) @@ -4583,7 +4634,7 @@ def oneOf( strs, caseless=False, useRegex=True ): symbols = [] if isinstance(strs,basestring): symbols = strs.split() - elif isinstance(strs, collections.Iterable): + elif isinstance(strs, Iterable): symbols = list(strs) else: warnings.warn("Invalid argument to oneOf, expected string or iterable", @@ -4734,7 +4785,7 @@ def locatedExpr(expr): _escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) _escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) _escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) -_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) _charRange = Group(_singleChar + Suppress("-") + _singleChar) _reBracketExpr = Literal("[") + 
Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index f6ef476b80..586be00293 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -1,6 +1,6 @@ from __future__ import print_function, absolute_import -__version__ = '1.0.2' +__version__ = '1.1.0' __all__ = ["Finder", "WindowsFinder", "SystemPath", "InvalidPythonVersion"] from .pythonfinder import Finder diff --git a/pipenv/vendor/pythonfinder/cli.py b/pipenv/vendor/pythonfinder/cli.py index d285fb29d4..64d3372d0b 100644 --- a/pipenv/vendor/pythonfinder/cli.py +++ b/pipenv/vendor/pythonfinder/cli.py @@ -8,7 +8,6 @@ from .pythonfinder import Finder -# @click.group(invoke_without_command=True, context_settings=CONTEXT_SETTINGS) @click.command() @click.option("--find", default=False, nargs=1, help="Find a specific python version.") @click.option("--which", default=False, nargs=1, help="Run the which command.") @@ -29,6 +28,23 @@ def cli(ctx, find=False, which=False, findall=False, version=False): ) sys.exit(0) finder = Finder() + if findall: + versions = finder.find_all_python_versions() + if versions: + click.secho("Found python at the following locations:", fg="green") + for v in versions: + py = v.py_version + click.secho( + "Python: {py.version!s} ({py.architecture!s}) @ {py.comes_from.path!s}".format( + py=py + ), + fg="yellow", + ) + else: + click.secho( + "ERROR: No valid python versions found! Check your path and try again.", + fg="red", + ) if find: if any([find.startswith("{0}".format(n)) for n in range(10)]): diff --git a/pipenv/vendor/pythonfinder/models/__init__.py b/pipenv/vendor/pythonfinder/models/__init__.py index ef0ed36830..e788c21cf4 100644 --- a/pipenv/vendor/pythonfinder/models/__init__.py +++ b/pipenv/vendor/pythonfinder/models/__init__.py @@ -12,22 +12,3 @@ from .path import SystemPath from .python import PythonVersion from .windows import WindowsFinder - - -@six.add_metaclass(abc.ABCMeta) -class BaseFinder(object): - def get_versions(self): - """Return the available versions from the finder""" - raise NotImplementedError - - @classmethod - def create(cls): - raise NotImplementedError - - @property - def version_paths(self): - return self.versions.values() - - @property - def expanded_paths(self): - return (p.paths.values() for p in self.version_paths) diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 8a40e2482d..0fcce6c850 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -11,8 +11,6 @@ from packaging.version import Version from packaging.version import parse as parse_version -from vistir.compat import Path - from ..environment import SYSTEM_ARCH from ..utils import ( _filter_none, ensure_path, get_python_version, optional_instance_of diff --git a/pipenv/vendor/pythonfinder/models/windows.py b/pipenv/vendor/pythonfinder/models/windows.py index 4f0b64d129..fcb4d42a53 100644 --- a/pipenv/vendor/pythonfinder/models/windows.py +++ b/pipenv/vendor/pythonfinder/models/windows.py @@ -60,7 +60,10 @@ def get_versions(self): env_versions = pep514env.findall() path = None for version_object in env_versions: - path = ensure_path(version_object.info.install_path.__getattr__("")) + install_path = getattr(version_object.info, 'install_path', None) + if install_path is None: + continue + path = 
ensure_path(install_path.__getattr__("")) try: py_version = PythonVersion.from_windows_launcher(version_object) except InvalidPythonVersion: diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index 285e06be58..6494d2439e 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -14,8 +14,6 @@ import vistir -from vistir.compat import Path - from .exceptions import InvalidPythonVersion @@ -31,7 +29,7 @@ def get_python_version(path): """Get python version string using subprocess from a given path.""" version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"] try: - out, _ = vistir.misc.run(version_cmd) + out, _ = vistir.misc.run(version_cmd, block=True, nospin=True) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not out: @@ -44,7 +42,7 @@ def optional_instance_of(cls): def path_and_exists(path): - return attr.validators.instance_of(Path) and path.exists() + return attr.validators.instance_of(vistir.compat.Path) and path.exists() def path_is_executable(path): @@ -87,9 +85,9 @@ def ensure_path(path): :rtype: :class:`~pathlib.Path` """ - if isinstance(path, Path): + if isinstance(path, vistir.compat.Path): path = path.as_posix() - path = Path(os.path.expandvars(path)) + path = vistir.compat.Path(os.path.expandvars(path)) try: path = path.resolve() except OSError: @@ -105,8 +103,8 @@ def _filter_none(k, v): def filter_pythons(path): """Return all valid pythons in a given path""" - if not isinstance(path, Path): - path = Path(str(path)) + if not isinstance(path, vistir.compat.Path): + path = vistir.compat.Path(str(path)) if not path.is_dir(): return path if path_is_python(path) else None return filter(lambda x: path_is_python(x), path.iterdir()) diff --git a/pipenv/vendor/pytoml/parser.py b/pipenv/vendor/pytoml/parser.py index e03a03fbda..9f94e9230a 100644 --- a/pipenv/vendor/pytoml/parser.py +++ b/pipenv/vendor/pytoml/parser.py @@ -223,8 +223,8 @@ def _p_key(s): _datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') _basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') -_litstr_re = re.compile(r"[^'\000-\037]*") -_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*") +_litstr_re = re.compile(r"[^'\000\010\012-\037]*") +_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*") def _p_value(s, object_pairs_hook): pos = s.pos() diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 01a56efa6e..ddfbcf98d0 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.1.6' +__version__ = '1.1.7' from .exceptions import RequirementError diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py index 90678344bb..ae643517f4 100644 --- a/pipenv/vendor/requirementslib/models/dependencies.py +++ b/pipenv/vendor/requirementslib/models/dependencies.py @@ -274,8 +274,11 @@ def get_dependencies(ireq, sources=None, parent=None): ireq, "project_name", getattr(ireq, "project", ireq.name), ) - version = getattr(ireq, "version") - ireq = InstallRequirement.from_line("{0}=={1}".format(name, version)) + version = getattr(ireq, "version", None) + if not version: + ireq = InstallRequirement.from_line("{0}".format(name)) + else: + ireq = InstallRequirement.from_line("{0}=={1}".format(name, 
version)) pip_options = get_pip_options(sources=sources) getters = [ get_dependencies_from_cache, diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 13a3a60ec9..73de6d2db5 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -29,7 +29,7 @@ ) from ..exceptions import RequirementError -from ..utils import VCS_LIST, is_installable_file, is_vcs +from ..utils import VCS_LIST, is_installable_file, is_vcs, ensure_setup_py from .baserequirement import BaseRequirement from .dependencies import ( AbstractDependency, find_all_matches, get_abstract_dependencies, @@ -760,7 +760,7 @@ def hashes_as_pip(self): @property def markers_as_pip(self): if self.markers: - return "; {0}".format(self.markers).replace('"', "'") + return " ; {0}".format(self.markers).replace('"', "'") return "" @@ -1054,7 +1054,8 @@ def as_ireq(self): if self.editable or self.req.editable: if ireq_line.startswith("-e "): ireq_line = ireq_line[len("-e "):] - ireq = InstallRequirement.from_editable(ireq_line) + with ensure_setup_py(self.req.path): + ireq = InstallRequirement.from_editable(ireq_line) else: ireq = InstallRequirement.from_line(ireq_line) if not getattr(ireq, "req", None): diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 2c6ef7e00b..de9acb53e6 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -508,5 +508,3 @@ def fix_requires_python_marker(requires_python): ]) marker_to_add = PackagingRequirement('fakepkg; {0}'.format(marker_str)).marker return marker_to_add - - diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py index 5d3ec08fc9..fb2e6bc30f 100644 --- a/pipenv/vendor/requirementslib/models/vcs.py +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -52,7 +52,7 @@ def update(self, ref): sha = self.repo_instance.get_revision_sha(self.checkout_directory, target_ref.arg_rev) target_rev = target_ref.make_new(sha) if parse_version(pip_version) > parse_version("18.0"): - self.repo_instance.update(self.checkout_directory, self.url, target_rev) + self.repo_instance.update(self.checkout_directory, self.url, target_ref) else: self.repo_instance.update(self.checkout_directory, target_ref) self.commit_hash = self.get_commit_hash(ref) diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index c41f967578..b490d3cf8c 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -1,21 +1,23 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import +import contextlib import logging import os import six +import tomlkit from six.moves.urllib.parse import urlparse, urlsplit -from pip_shims import ( - Command, VcsSupport, cmdoptions, is_archive_file, is_installable_dir +from pip_shims.shims import ( + Command, VcsSupport, cmdoptions, is_archive_file, + is_installable_dir as _is_installable_dir ) from vistir.compat import Path -from vistir.path import is_valid_url, ensure_mkdir_p +from vistir.path import is_valid_url, ensure_mkdir_p, create_tracked_tempdir -VCS_ACCESS = VcsSupport() VCS_LIST = ("git", "svn", "hg", "bzr") VCS_SCHEMES = [] SCHEME_LIST = ("http://", "https://", "ftp://", "ftps://", "file://") @@ -37,6 +39,19 @@ def setup_logger(): log = setup_logger() +def is_installable_dir(path): + if _is_installable_dir(path): + return 
True + path = Path(path) + pyproject = path.joinpath("pyproject.toml") + if pyproject.exists(): + pyproject_toml = tomlkit.loads(pyproject.read_text()) + build_system = pyproject_toml.get("build-system", {}).get("build-backend", "") + if build_system: + return True + return False + + def is_vcs(pipfile_entry): """Determine if dictionary entry from Pipfile is for a vcs dependency.""" if hasattr(pipfile_entry, "keys"): @@ -150,3 +165,19 @@ def get_pip_command(): @ensure_mkdir_p(mode=0o777) def _ensure_dir(path): return path + + +@contextlib.contextmanager +def ensure_setup_py(base_dir): + if not base_dir: + base_dir = create_tracked_tempdir(prefix="requirementslib-setup") + base_dir = Path(base_dir) + setup_py = base_dir.joinpath("setup.py") + is_new = False if setup_py.exists() else True + if not setup_py.exists(): + setup_py.write_text(u"") + try: + yield + finally: + if is_new: + setup_py.unlink() diff --git a/pipenv/vendor/shellingham/__init__.py b/pipenv/vendor/shellingham/__init__.py index a415c4a967..90c00abbdc 100644 --- a/pipenv/vendor/shellingham/__init__.py +++ b/pipenv/vendor/shellingham/__init__.py @@ -4,7 +4,7 @@ from ._core import ShellDetectionFailure -__version__ = '1.2.4' +__version__ = '1.2.6' def detect_shell(pid=None, max_depth=6): diff --git a/pipenv/vendor/shellingham/posix.py b/pipenv/vendor/shellingham/posix.py index b25dd87456..0bbf988b90 100644 --- a/pipenv/vendor/shellingham/posix.py +++ b/pipenv/vendor/shellingham/posix.py @@ -21,7 +21,7 @@ def _get_process_mapping(): processes = {} for line in output.split('\n'): try: - pid, ppid, args = line.strip().split(maxsplit=2) + pid, ppid, args = line.strip().split(None, 2) except ValueError: continue processes[pid] = Process( diff --git a/pipenv/vendor/shellingham/posix/_default.py b/pipenv/vendor/shellingham/posix/_default.py deleted file mode 100644 index 8694427611..0000000000 --- a/pipenv/vendor/shellingham/posix/_default.py +++ /dev/null @@ -1,27 +0,0 @@ -import collections -import shlex -import subprocess -import sys - - -Process = collections.namedtuple('Process', 'args pid ppid') - - -def get_process_mapping(): - """Try to look up the process tree via the output of `ps`. - """ - output = subprocess.check_output([ - 'ps', '-ww', '-o', 'pid=', '-o', 'ppid=', '-o', 'args=', - ]) - if not isinstance(output, str): - output = output.decode(sys.stdout.encoding) - processes = {} - for line in output.split('\n'): - try: - pid, ppid, args = line.strip().split(None, 2) - except ValueError: - continue - processes[pid] = Process( - args=tuple(shlex.split(args)), pid=pid, ppid=ppid, - ) - return processes diff --git a/pipenv/vendor/shellingham/posix/_proc.py b/pipenv/vendor/shellingham/posix/_proc.py index 921f250819..e3a6e46db0 100644 --- a/pipenv/vendor/shellingham/posix/_proc.py +++ b/pipenv/vendor/shellingham/posix/_proc.py @@ -1,40 +1,34 @@ import os import re -from ._core import Process +from ._default import Process STAT_PPID = 3 STAT_TTY = 6 -STAT_PATTERN = re.compile(r'\(.+\)|\S+') - - -def _get_stat(pid): - with open(os.path.join('/proc', str(pid), 'stat')) as f: - parts = STAT_PATTERN.findall(f.read()) - return parts[STAT_TTY], parts[STAT_PPID] - - -def _get_cmdline(pid): - with open(os.path.join('/proc', str(pid), 'cmdline')) as f: - return tuple(f.read().split('\0')[:-1]) - def get_process_mapping(): """Try to look up the process tree via the /proc interface. 
""" - self_tty = _get_stat(os.getpid())[0] + with open('/proc/{0}/stat'.format(os.getpid())) as f: + self_tty = f.read().split()[STAT_TTY] processes = {} for pid in os.listdir('/proc'): if not pid.isdigit(): continue try: - tty, ppid = _get_stat(pid) - if tty != self_tty: - continue - args = _get_cmdline(pid) - processes[pid] = Process(args=args, pid=pid, ppid=ppid) + stat = '/proc/{0}/stat'.format(pid) + cmdline = '/proc/{0}/cmdline'.format(pid) + with open(stat) as fstat, open(cmdline) as fcmdline: + stat = re.findall(r'\(.+\)|\S+', fstat.read()) + cmd = fcmdline.read().split('\x00')[:-1] + ppid = stat[STAT_PPID] + tty = stat[STAT_TTY] + if tty == self_tty: + processes[pid] = Process( + args=tuple(cmd), pid=pid, ppid=ppid, + ) except IOError: # Process has disappeared - just ignore it. continue diff --git a/pipenv/vendor/shellingham/posix/_ps.py b/pipenv/vendor/shellingham/posix/_ps.py index e96278cf5f..8694427611 100644 --- a/pipenv/vendor/shellingham/posix/_ps.py +++ b/pipenv/vendor/shellingham/posix/_ps.py @@ -1,8 +1,10 @@ +import collections import shlex import subprocess import sys -from ._core import Process + +Process = collections.namedtuple('Process', 'args pid ppid') def get_process_mapping(): diff --git a/pipenv/vendor/shellingham/posix/linux.py b/pipenv/vendor/shellingham/posix/linux.py deleted file mode 100644 index 6db9783481..0000000000 --- a/pipenv/vendor/shellingham/posix/linux.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import re - -from ._default import Process - - -STAT_PPID = 3 -STAT_TTY = 6 - - -def get_process_mapping(): - """Try to look up the process tree via Linux's /proc - """ - with open('/proc/{0}/stat'.format(os.getpid())) as f: - self_tty = f.read().split()[STAT_TTY] - processes = {} - for pid in os.listdir('/proc'): - if not pid.isdigit(): - continue - try: - stat = '/proc/{0}/stat'.format(pid) - cmdline = '/proc/{0}/cmdline'.format(pid) - with open(stat) as fstat, open(cmdline) as fcmdline: - stat = re.findall(r'\(.+\)|\S+', fstat.read()) - cmd = fcmdline.read().split('\x00')[:-1] - ppid = stat[STAT_PPID] - tty = stat[STAT_TTY] - if tty == self_tty: - processes[pid] = Process( - args=tuple(cmd), pid=pid, ppid=ppid, - ) - except IOError: - # Process has disappeared - just ignore it. - continue - return processes diff --git a/pipenv/vendor/shellingham/posix/proc.py b/pipenv/vendor/shellingham/posix/proc.py index e64a5fe21f..89d6c8e928 100644 --- a/pipenv/vendor/shellingham/posix/proc.py +++ b/pipenv/vendor/shellingham/posix/proc.py @@ -1,5 +1,7 @@ +import io import os import re +import sys from ._core import Process @@ -27,13 +29,21 @@ def detect_proc(): def _get_stat(pid, name): - with open(os.path.join('/proc', str(pid), name)) as f: + path = os.path.join('/proc', str(pid), name) + with io.open(path, encoding='ascii', errors='replace') as f: + # We only care about TTY and PPID -- all numbers. parts = STAT_PATTERN.findall(f.read()) return parts[STAT_TTY], parts[STAT_PPID] def _get_cmdline(pid): - with open(os.path.join('/proc', str(pid), 'cmdline')) as f: + path = os.path.join('/proc', str(pid), 'cmdline') + encoding = sys.getfilesystemencoding() or 'utf-8' + with io.open(path, encoding=encoding, errors='replace') as f: + # XXX: Command line arguments can be arbitrary byte sequences, not + # necessarily decodable. For Shellingham's purpose, however, we don't + # care. (pypa/pipenv#2820) + # cmdline appends an extra NULL at the end, hence the [:-1]. 
return tuple(f.read().split('\0')[:-1]) diff --git a/pipenv/vendor/toml.py b/pipenv/vendor/toml.py index 0bf04ae516..dac398837b 100644 --- a/pipenv/vendor/toml.py +++ b/pipenv/vendor/toml.py @@ -6,9 +6,10 @@ import io import datetime from os import linesep +import sys -__version__ = "0.9.4" -__spec__ = "0.4.0" +__version__ = "0.9.6" +_spec_ = "0.4.0" class TomlDecodeError(Exception): @@ -230,7 +231,7 @@ def loads(s, _dict=dict): if item == '[' and (not openstring and not keygroup and not arrayoftables): if beginline: - if sl[i + 1] == '[': + if len(sl) > i + 1 and sl[i + 1] == '[': arrayoftables = True else: keygroup = True @@ -282,7 +283,10 @@ def loads(s, _dict=dict): if len(line) > 2 and (line[-1] == multilinestr[0] and line[-2] == multilinestr[0] and line[-3] == multilinestr[0]): - value, vtype = _load_value(multilinestr, _dict) + try: + value, vtype = _load_value(multilinestr, _dict) + except ValueError as err: + raise TomlDecodeError(str(err)) currentlevel[multikey] = value multikey = None multilinestr = "" @@ -298,23 +302,42 @@ def loads(s, _dict=dict): continue if line[0] == '[': arrayoftables = False + if len(line) == 1: + raise TomlDecodeError("Opening key group bracket on line by " + "itself.") if line[1] == '[': arrayoftables = True - line = line[2:].split(']]', 1) + line = line[2:] + splitstr = ']]' else: - line = line[1:].split(']', 1) - if line[1].strip() != "": + line = line[1:] + splitstr = ']' + i = 1 + quotesplits = _get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and splitstr in quotesplit: + break + i += quotesplit.count(splitstr) + quoted = not quoted + line = line.split(splitstr, i) + if len(line) < i + 1 or line[-1].strip() != "": raise TomlDecodeError("Key group not on a line by itself.") - groups = line[0].split('.') + groups = splitstr.join(line[:-1]).split('.') i = 0 while i < len(groups): groups[i] = groups[i].strip() - if groups[i][0] == '"' or groups[i][0] == "'": + if len(groups[i]) > 0 and (groups[i][0] == '"' or + groups[i][0] == "'"): groupstr = groups[i] j = i + 1 while not groupstr[0] == groupstr[-1]: j += 1 - groupstr = '.'.join(groups[i:j]) + if j > len(groups) + 2: + raise TomlDecodeError("Invalid group name '" + + groupstr + "' Something " + + "went wrong.") + groupstr = '.'.join(groups[i:j]).strip() groups[i] = groupstr[1:-1] groups[i + 1:j] = [] else: @@ -366,11 +389,17 @@ def loads(s, _dict=dict): if line[-1] != "}": raise TomlDecodeError("Line breaks are not allowed in inline" "objects") - _load_inline_object(line, currentlevel, _dict, multikey, - multibackslash) + try: + _load_inline_object(line, currentlevel, _dict, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err)) elif "=" in line: - ret = _load_line(line, currentlevel, _dict, multikey, - multibackslash) + try: + ret = _load_line(line, currentlevel, _dict, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err)) if ret is not None: multikey, multilinestr, multibackslash = ret return retval @@ -387,15 +416,18 @@ def _load_inline_object(line, currentlevel, _dict, multikey=False, try: _, value = candidate_group.split('=', 1) except ValueError: - raise TomlDecodeError("Invalid inline table encountered") + raise ValueError("Invalid inline table encountered") value = value.strip() if ((value[0] == value[-1] and value[0] in ('"', "'")) or ( value[0] in '-0123456789' or value in ('true', 'false') or - (value[0] == "[" and value[-1] == "]"))): + (value[0] == "[" and value[-1] == "]") or 
+ (value[0] == '{' and value[-1] == '}'))): groups.append(candidate_group) - else: + elif len(candidate_groups) > 0: candidate_groups[0] = candidate_group + "," + candidate_groups[0] + else: + raise ValueError("Invalid inline table value encountered") for group in groups: status = _load_line(group, currentlevel, _dict, multikey, multibackslash) @@ -430,8 +462,38 @@ def _strictly_valid_num(n): return True +def _get_split_on_quotes(line): + doublequotesplits = line.split('"') + quoted = False + quotesplits = [] + if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]: + singlequotesplits = doublequotesplits[0].split("'") + doublequotesplits = doublequotesplits[1:] + while len(singlequotesplits) % 2 == 0 and len(doublequotesplits): + singlequotesplits[-1] += '"' + doublequotesplits[0] + doublequotesplits = doublequotesplits[1:] + if "'" in singlequotesplits[-1]: + singlequotesplits = (singlequotesplits[:-1] + + singlequotesplits[-1].split("'")) + quotesplits += singlequotesplits + for doublequotesplit in doublequotesplits: + if quoted: + quotesplits.append(doublequotesplit) + else: + quotesplits += doublequotesplit.split("'") + quoted = not quoted + return quotesplits + + def _load_line(line, currentlevel, _dict, multikey, multibackslash): i = 1 + quotesplits = _get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and '=' in quotesplit: + break + i += quotesplit.count('=') + quoted = not quoted pair = line.split('=', i) strictly_valid = _strictly_valid_num(pair[-1]) if _number_with_underscores.match(pair[-1]): @@ -451,7 +513,7 @@ def _load_line(line, currentlevel, _dict, multikey, multibackslash): prev_val = pair[-1] pair = line.split('=', i) if prev_val == pair[-1]: - raise TomlDecodeError("Invalid date or number") + raise ValueError("Invalid date or number") if strictly_valid: strictly_valid = _strictly_valid_num(pair[-1]) pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()] @@ -478,7 +540,7 @@ def _load_line(line, currentlevel, _dict, multikey, multibackslash): value, vtype = _load_value(pair[1], _dict, strictly_valid) try: currentlevel[pair[0]] - raise TomlDecodeError("Duplicate keys!") + raise ValueError("Duplicate keys!") except KeyError: if multikey: return multikey, multilinestr, multibackslash @@ -492,13 +554,28 @@ def _load_date(val): try: if len(val) > 19: if val[19] == '.': - microsecond = int(val[20:26]) - if len(val) > 26: - tz = TomlTz(val[26:32]) + if val[-1].upper() == 'Z': + subsecondval = val[20:-1] + tzval = "Z" + else: + subsecondvalandtz = val[20:] + if '+' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('+') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + elif '-' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('-') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + tz = TomlTz(tzval) + microsecond = int(int(subsecondval) * + (10 ** (6 - len(subsecondval)))) else: - tz = TomlTz(val[19:25]) + tz = TomlTz(val[19:]) except ValueError: tz = None + if "-" not in val[1:]: + return None try: d = datetime.datetime( int(val[:4]), int(val[5:7]), @@ -510,8 +587,6 @@ def _load_date(val): def _load_unicode_escapes(v, hexbytes, prefix): - hexchars = ['0', '1', '2', '3', '4', '5', '6', '7', - '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'] skip = False i = len(v) - 1 while i > -1 and v[i] == '\\': @@ -532,14 +607,12 @@ def _load_unicode_escapes(v, hexbytes, prefix): hxblen = 4 if prefix == "\\U": hxblen = 8 - while i < hxblen: - try: - if 
not hx[i].lower() in hexchars: - raise IndexError("This is a hack") - except IndexError: - raise TomlDecodeError("Invalid escape sequence") - hxb += hx[i].lower() - i += 1 + hxb = ''.join(hx[i:i + hxblen]).lower() + if hxb.strip('0123456789abcdef'): + raise ValueError("Invalid escape sequence: " + hxb) + if hxb[0] == "d" and hxb[1].strip('01234567'): + raise ValueError("Invalid escape sequence: " + hxb + + ". Only scalar unicode points are allowed.") v += unichr(int(hxb, 16)) v += unicode(hx[len(hxb):]) return v @@ -569,7 +642,7 @@ def _unescape(v): elif v[i] == 'u' or v[i] == 'U': i += 1 else: - raise TomlDecodeError("Reserved escape sequence used") + raise ValueError("Reserved escape sequence used") continue elif v[i] == '\\': backslash = True @@ -579,19 +652,25 @@ def _unescape(v): def _load_value(v, _dict, strictly_valid=True): if not v: - raise TomlDecodeError("Empty value is invalid") + raise ValueError("Empty value is invalid") if v == 'true': return (True, "bool") elif v == 'false': return (False, "bool") elif v[0] == '"': testv = v[1:].split('"') - if testv[0] == '' and testv[1] == '': - testv = testv[2:-2] + triplequote = False + triplequotecount = 0 + if len(testv) > 1 and testv[0] == '' and testv[1] == '': + testv = testv[2:] + triplequote = True closed = False for tv in testv: if tv == '': - closed = True + if triplequote: + triplequotecount += 1 + else: + closed = True else: oddbackslash = False try: @@ -605,9 +684,12 @@ def _load_value(v, _dict, strictly_valid=True): pass if not oddbackslash: if closed: - raise TomlDecodeError("Stuff after closed string. WTF?") + raise ValueError("Stuff after closed string. WTF?") else: - closed = True + if not triplequote or triplequotecount > 1: + closed = True + else: + triplequotecount = 0 escapeseqs = v.split('\\')[1:] backslash = False for i in escapeseqs: @@ -616,7 +698,7 @@ def _load_value(v, _dict, strictly_valid=True): else: if i[0] not in _escapes and (i[0] != 'u' and i[0] != 'U' and not backslash): - raise TomlDecodeError("Reserved escape sequence used") + raise ValueError("Reserved escape sequence used") if backslash: backslash = False for prefix in ["\\u", "\\U"]: @@ -624,7 +706,7 @@ def _load_value(v, _dict, strictly_valid=True): hexbytes = v.split(prefix) v = _load_unicode_escapes(hexbytes[0], hexbytes[1:], prefix) v = _unescape(v) - if v[1] == '"' and (len(v) < 3 or v[1] == v[2]): + if len(v) > 1 and v[1] == '"' and (len(v) < 3 or v[1] == v[2]): v = v[2:-2] return (v[1:-1], "str") elif v[0] == "'": @@ -642,8 +724,8 @@ def _load_value(v, _dict, strictly_valid=True): if parsed_date is not None: return (parsed_date, "date") if not strictly_valid: - raise TomlDecodeError("Weirdness with leading zeroes or underscores" - " in your number.") + raise ValueError("Weirdness with leading zeroes or " + "underscores in your number.") itype = "int" neg = False if v[0] == '-': @@ -654,10 +736,10 @@ def _load_value(v, _dict, strictly_valid=True): v = v.replace('_', '') if '.' in v or 'e' in v or 'E' in v: if '.' 
in v and v.split('.', 1)[1] == '': - raise TomlDecodeError("This float is missing digits after " - "the point") + raise ValueError("This float is missing digits after " + "the point") if v[0] not in '0123456789': - raise TomlDecodeError("This float doesn't have a leading digit") + raise ValueError("This float doesn't have a leading digit") v = float(v) itype = "float" else: @@ -667,6 +749,22 @@ def _load_value(v, _dict, strictly_valid=True): return (v, itype) +def _bounded_string(s): + if len(s) == 0: + return True + if s[-1] != s[0]: + return False + i = -2 + backslash = False + while len(s) + i > 0: + if s[i] == "\\": + backslash = not backslash + i -= 1 + else: + break + return not backslash + + def _load_array(a, _dict): atype = None retval = [] @@ -687,6 +785,12 @@ def _load_array(a, _dict): in_str = False while end_group_index < len(a[1:]): if a[end_group_index] == '"' or a[end_group_index] == "'": + if in_str: + backslash_index = end_group_index - 1 + while (backslash_index > -1 and + a[backslash_index] == '\\'): + in_str = not in_str + backslash_index -= 1 in_str = not in_str if in_str or a[end_group_index] != '}': end_group_index += 1 @@ -709,9 +813,11 @@ def _load_array(a, _dict): if strarray: while b < len(a) - 1: ab = a[b].strip() - while ab[-1] != ab[0] or (len(ab) > 2 and - ab[0] == ab[1] == ab[2] and - ab[-2] != ab[0] and ab[-3] != ab[0]): + while (not _bounded_string(ab) or + (len(ab) > 2 and + ab[0] == ab[1] == ab[2] and + ab[-2] != ab[0] and + ab[-3] != ab[0])): a[b] = a[b] + ',' + a[b + 1] ab = a[b].strip() if b < len(a) - 2: @@ -739,7 +845,7 @@ def _load_array(a, _dict): nval, ntype = _load_value(a[i], _dict) if atype: if ntype != atype: - raise TomlDecodeError("Not a homogeneous array") + raise ValueError("Not a homogeneous array") else: atype = ntype retval.append(nval) @@ -874,46 +980,57 @@ def _dump_inline_table(section): def _dump_value(v): dump_funcs = { - str: lambda: _dump_str(v), - unicode: lambda: _dump_str(v), - list: lambda: _dump_list(v), - bool: lambda: unicode(v).lower(), - float: lambda: _dump_float(v), - datetime.datetime: lambda: v.isoformat(), + str: _dump_str, + unicode: _dump_str, + list: _dump_list, + int: lambda v: v, + bool: lambda v: unicode(v).lower(), + float: _dump_float, + datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), } # Lookup function corresponding to v's type dump_fn = dump_funcs.get(type(v)) + if dump_fn is None and hasattr(v, '__iter__'): + dump_fn = dump_funcs[list] # Evaluate function (if it exists) else return v - return dump_fn() if dump_fn is not None else v + return dump_fn(v) if dump_fn is not None else dump_funcs[str](v) def _dump_str(v): + if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): + v = v.decode('utf-8') v = "%r" % v if v[0] == 'u': v = v[1:] singlequote = v.startswith("'") - v = v[1:-1] + if singlequote or v.startswith('"'): + v = v[1:-1] if singlequote: v = v.replace("\\'", "'") v = v.replace('"', '\\"') - v = v.replace("\\x", "\\u00") - return unicode('"' + v + '"') + v = v.split("\\x") + while len(v) > 1: + i = -1 + if not v[0]: + v = v[1:] + v[0] = v[0].replace("\\\\", "\\") + # No, I don't know why != works and == breaks + joinx = v[0][i] != "\\" + while v[0][:i] and v[0][i] == "\\": + joinx = not joinx + i -= 1 + if joinx: + joiner = "x" + else: + joiner = "u00" + v = [v[0] + joiner + v[1]] + v[2:] + return unicode('"' + v[0] + '"') def _dump_list(v): - t = [] retval = "[" for u in v: - t.append(_dump_value(u)) - while t != []: - s = [] - for u in t: - if 
isinstance(u, list): - for r in u: - s.append(r) - else: - retval += " " + unicode(u) + "," - t = s + retval += " " + unicode(_dump_value(u)) + "," retval += "]" return retval diff --git a/pipenv/vendor/toml/LICENSE b/pipenv/vendor/toml/LICENSE new file mode 100644 index 0000000000..08e981ffac --- /dev/null +++ b/pipenv/vendor/toml/LICENSE @@ -0,0 +1,26 @@ +The MIT License + +Copyright 2013-2018 William Pearson +Copyright 2015-2016 Julien Enselme +Copyright 2016 Google Inc. +Copyright 2017 Samuel Vasko +Copyright 2017 Nate Prewitt +Copyright 2017 Jack Evans + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/pipenv/vendor/toml/__init__.py b/pipenv/vendor/toml/__init__.py new file mode 100644 index 0000000000..e436b271b0 --- /dev/null +++ b/pipenv/vendor/toml/__init__.py @@ -0,0 +1,21 @@ +"""Python module which parses and emits TOML. + +Released under the MIT license. 
+""" + +from toml import encoder +from toml import decoder + +__version__ = "0.10.0" +_spec_ = "0.5.0" + +load = decoder.load +loads = decoder.loads +TomlDecoder = decoder.TomlDecoder +TomlDecodeError = decoder.TomlDecodeError + +dump = encoder.dump +dumps = encoder.dumps +TomlEncoder = encoder.TomlEncoder +TomlArraySeparatorEncoder = encoder.TomlArraySeparatorEncoder +TomlPreserveInlineDictEncoder = encoder.TomlPreserveInlineDictEncoder diff --git a/pipenv/vendor/toml/decoder.py b/pipenv/vendor/toml/decoder.py new file mode 100644 index 0000000000..4d468dd488 --- /dev/null +++ b/pipenv/vendor/toml/decoder.py @@ -0,0 +1,945 @@ +import datetime +import io +from os import linesep +import re +import sys + +from toml.tz import TomlTz + +if sys.version_info < (3,): + _range = xrange # noqa: F821 +else: + unicode = str + _range = range + basestring = str + unichr = chr + + +def _detect_pathlib_path(p): + if (3, 4) <= sys.version_info: + import pathlib + if isinstance(p, pathlib.PurePath): + return True + return False + + +def _ispath(p): + if isinstance(p, basestring): + return True + return _detect_pathlib_path(p) + + +def _getpath(p): + if (3, 6) <= sys.version_info: + import os + return os.fspath(p) + if _detect_pathlib_path(p): + return str(p) + return p + + +try: + FNFError = FileNotFoundError +except NameError: + FNFError = IOError + + +TIME_RE = re.compile("([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?") + + +class TomlDecodeError(ValueError): + """Base toml Exception / Error.""" + + def __init__(self, msg, doc, pos): + lineno = doc.count('\n', 0, pos) + 1 + colno = pos - doc.rfind('\n', 0, pos) + emsg = '{} (line {} column {} char {})'.format(msg, lineno, colno, pos) + ValueError.__init__(self, emsg) + self.msg = msg + self.doc = doc + self.pos = pos + self.lineno = lineno + self.colno = colno + + +# Matches a TOML number, which allows underscores for readability +_number_with_underscores = re.compile('([0-9])(_([0-9]))*') + + +def _strictly_valid_num(n): + n = n.strip() + if not n: + return False + if n[0] == '_': + return False + if n[-1] == '_': + return False + if "_." in n or "._" in n: + return False + if len(n) == 1: + return True + if n[0] == '0' and n[1] not in ['.', 'o', 'b', 'x']: + return False + if n[0] == '+' or n[0] == '-': + n = n[1:] + if len(n) > 1 and n[0] == '0' and n[1] != '.': + return False + if '__' in n: + return False + return True + + +def load(f, _dict=dict, decoder=None): + """Parses named file or files as toml and returns a dictionary + + Args: + f: Path to the file to open, array of files to read into single dict + or a file descriptor + _dict: (optional) Specifies the class of the returned toml dictionary + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError -- When f is invalid type + TomlDecodeError: Error while decoding toml + IOError / FileNotFoundError -- When an array with no valid (existing) + (Python 2 / Python 3) file paths is passed + """ + + if _ispath(f): + with io.open(_getpath(f), encoding='utf-8') as ffile: + return loads(ffile.read(), _dict, decoder) + elif isinstance(f, list): + from os import path as op + from warnings import warn + if not [path for path in f if op.exists(path)]: + error_msg = "Load expects a list to contain filenames only." 
+ error_msg += linesep + error_msg += ("The list needs to contain the path of at least one " + "existing file.") + raise FNFError(error_msg) + if decoder is None: + decoder = TomlDecoder() + d = decoder.get_empty_table() + for l in f: + if op.exists(l): + d.update(load(l, _dict, decoder)) + else: + warn("Non-existent filename in list with at least one valid " + "filename") + return d + else: + try: + return loads(f.read(), _dict, decoder) + except AttributeError: + raise TypeError("You can only load a file descriptor, filename or " + "list") + + +_groupname_re = re.compile(r'^[A-Za-z0-9_-]+$') + + +def loads(s, _dict=dict, decoder=None): + """Parses string as toml + + Args: + s: String to be parsed + _dict: (optional) Specifies the class of the returned toml dictionary + + Returns: + Parsed toml file represented as a dictionary + + Raises: + TypeError: When a non-string is passed + TomlDecodeError: Error while decoding toml + """ + + implicitgroups = [] + if decoder is None: + decoder = TomlDecoder(_dict) + retval = decoder.get_empty_table() + currentlevel = retval + if not isinstance(s, basestring): + raise TypeError("Expecting something like a string") + + if not isinstance(s, unicode): + s = s.decode('utf8') + + original = s + sl = list(s) + openarr = 0 + openstring = False + openstrchar = "" + multilinestr = False + arrayoftables = False + beginline = True + keygroup = False + dottedkey = False + keyname = 0 + for i, item in enumerate(sl): + if item == '\r' and sl[i + 1] == '\n': + sl[i] = ' ' + continue + if keyname: + if item == '\n': + raise TomlDecodeError("Key name found without value." + " Reached end of line.", original, i) + if openstring: + if item == openstrchar: + keyname = 2 + openstring = False + openstrchar = "" + continue + elif keyname == 1: + if item.isspace(): + keyname = 2 + continue + elif item == '.': + dottedkey = True + continue + elif item.isalnum() or item == '_' or item == '-': + continue + elif (dottedkey and sl[i - 1] == '.' and + (item == '"' or item == "'")): + openstring = True + openstrchar = item + continue + elif keyname == 2: + if item.isspace(): + if dottedkey: + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '.': + dottedkey = True + nextitem = sl[i + 1] + if not nextitem.isspace() and nextitem != '.': + keyname = 1 + continue + if item == '=': + keyname = 0 + dottedkey = False + else: + raise TomlDecodeError("Found invalid character in key name: '" + + item + "'. 
Try quoting the key name.", + original, i) + if item == "'" and openstrchar != '"': + k = 1 + try: + while sl[i - k] == "'": + k += 1 + if k == 3: + break + except IndexError: + pass + if k == 3: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = "'" + else: + openstrchar = "" + if item == '"' and openstrchar != "'": + oddbackslash = False + k = 1 + tripquote = False + try: + while sl[i - k] == '"': + k += 1 + if k == 3: + tripquote = True + break + if k == 1 or (k == 3 and tripquote): + while sl[i - k] == '\\': + oddbackslash = not oddbackslash + k += 1 + except IndexError: + pass + if not oddbackslash: + if tripquote: + multilinestr = not multilinestr + openstring = multilinestr + else: + openstring = not openstring + if openstring: + openstrchar = '"' + else: + openstrchar = "" + if item == '#' and (not openstring and not keygroup and + not arrayoftables): + j = i + try: + while sl[j] != '\n': + sl[j] = ' ' + j += 1 + except IndexError: + break + if item == '[' and (not openstring and not keygroup and + not arrayoftables): + if beginline: + if len(sl) > i + 1 and sl[i + 1] == '[': + arrayoftables = True + else: + keygroup = True + else: + openarr += 1 + if item == ']' and not openstring: + if keygroup: + keygroup = False + elif arrayoftables: + if sl[i - 1] == ']': + arrayoftables = False + else: + openarr -= 1 + if item == '\n': + if openstring or multilinestr: + if not multilinestr: + raise TomlDecodeError("Unbalanced quotes", original, i) + if ((sl[i - 1] == "'" or sl[i - 1] == '"') and ( + sl[i - 2] == sl[i - 1])): + sl[i] = sl[i - 1] + if sl[i - 3] == sl[i - 1]: + sl[i - 3] = ' ' + elif openarr: + sl[i] = ' ' + else: + beginline = True + elif beginline and sl[i] != ' ' and sl[i] != '\t': + beginline = False + if not keygroup and not arrayoftables: + if sl[i] == '=': + raise TomlDecodeError("Found empty keyname. 
", original, i) + keyname = 1 + s = ''.join(sl) + s = s.split('\n') + multikey = None + multilinestr = "" + multibackslash = False + pos = 0 + for idx, line in enumerate(s): + if idx > 0: + pos += len(s[idx - 1]) + 1 + if not multilinestr or multibackslash or '\n' not in multilinestr: + line = line.strip() + if line == "" and (not multikey or multibackslash): + continue + if multikey: + if multibackslash: + multilinestr += line + else: + multilinestr += line + multibackslash = False + if len(line) > 2 and (line[-1] == multilinestr[0] and + line[-2] == multilinestr[0] and + line[-3] == multilinestr[0]): + try: + value, vtype = decoder.load_value(multilinestr) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + currentlevel[multikey] = value + multikey = None + multilinestr = "" + else: + k = len(multilinestr) - 1 + while k > -1 and multilinestr[k] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = multilinestr[:-1] + else: + multilinestr += "\n" + continue + if line[0] == '[': + arrayoftables = False + if len(line) == 1: + raise TomlDecodeError("Opening key group bracket on line by " + "itself.", original, pos) + if line[1] == '[': + arrayoftables = True + line = line[2:] + splitstr = ']]' + else: + line = line[1:] + splitstr = ']' + i = 1 + quotesplits = decoder._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and splitstr in quotesplit: + break + i += quotesplit.count(splitstr) + quoted = not quoted + line = line.split(splitstr, i) + if len(line) < i + 1 or line[-1].strip() != "": + raise TomlDecodeError("Key group not on a line by itself.", + original, pos) + groups = splitstr.join(line[:-1]).split('.') + i = 0 + while i < len(groups): + groups[i] = groups[i].strip() + if len(groups[i]) > 0 and (groups[i][0] == '"' or + groups[i][0] == "'"): + groupstr = groups[i] + j = i + 1 + while not groupstr[0] == groupstr[-1]: + j += 1 + if j > len(groups) + 2: + raise TomlDecodeError("Invalid group name '" + + groupstr + "' Something " + + "went wrong.", original, pos) + groupstr = '.'.join(groups[i:j]).strip() + groups[i] = groupstr[1:-1] + groups[i + 1:j] = [] + else: + if not _groupname_re.match(groups[i]): + raise TomlDecodeError("Invalid group name '" + + groups[i] + "'. Try quoting it.", + original, pos) + i += 1 + currentlevel = retval + for i in _range(len(groups)): + group = groups[i] + if group == "": + raise TomlDecodeError("Can't have a keygroup with an empty " + "name", original, pos) + try: + currentlevel[group] + if i == len(groups) - 1: + if group in implicitgroups: + implicitgroups.remove(group) + if arrayoftables: + raise TomlDecodeError("An implicitly defined " + "table can't be an array", + original, pos) + elif arrayoftables: + currentlevel[group].append(decoder.get_empty_table() + ) + else: + raise TomlDecodeError("What? " + group + + " already exists?" 
+ + str(currentlevel), + original, pos) + except TypeError: + currentlevel = currentlevel[-1] + if group not in currentlevel: + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + except KeyError: + if i != len(groups) - 1: + implicitgroups.append(group) + currentlevel[group] = decoder.get_empty_table() + if i == len(groups) - 1 and arrayoftables: + currentlevel[group] = [decoder.get_empty_table()] + currentlevel = currentlevel[group] + if arrayoftables: + try: + currentlevel = currentlevel[-1] + except KeyError: + pass + elif line[0] == "{": + if line[-1] != "}": + raise TomlDecodeError("Line breaks are not allowed in inline" + "objects", original, pos) + try: + decoder.load_inline_object(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + elif "=" in line: + try: + ret = decoder.load_line(line, currentlevel, multikey, + multibackslash) + except ValueError as err: + raise TomlDecodeError(str(err), original, pos) + if ret is not None: + multikey, multilinestr, multibackslash = ret + return retval + + +def _load_date(val): + microsecond = 0 + tz = None + try: + if len(val) > 19: + if val[19] == '.': + if val[-1].upper() == 'Z': + subsecondval = val[20:-1] + tzval = "Z" + else: + subsecondvalandtz = val[20:] + if '+' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('+') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + elif '-' in subsecondvalandtz: + splitpoint = subsecondvalandtz.index('-') + subsecondval = subsecondvalandtz[:splitpoint] + tzval = subsecondvalandtz[splitpoint:] + else: + tzval = None + subsecondval = subsecondvalandtz + if tzval is not None: + tz = TomlTz(tzval) + microsecond = int(int(subsecondval) * + (10 ** (6 - len(subsecondval)))) + else: + tz = TomlTz(val[19:]) + except ValueError: + tz = None + if "-" not in val[1:]: + return None + try: + if len(val) == 10: + d = datetime.date( + int(val[:4]), int(val[5:7]), + int(val[8:10])) + else: + d = datetime.datetime( + int(val[:4]), int(val[5:7]), + int(val[8:10]), int(val[11:13]), + int(val[14:16]), int(val[17:19]), microsecond, tz) + except ValueError: + return None + return d + + +def _load_unicode_escapes(v, hexbytes, prefix): + skip = False + i = len(v) - 1 + while i > -1 and v[i] == '\\': + skip = not skip + i -= 1 + for hx in hexbytes: + if skip: + skip = False + i = len(hx) - 1 + while i > -1 and hx[i] == '\\': + skip = not skip + i -= 1 + v += prefix + v += hx + continue + hxb = "" + i = 0 + hxblen = 4 + if prefix == "\\U": + hxblen = 8 + hxb = ''.join(hx[i:i + hxblen]).lower() + if hxb.strip('0123456789abcdef'): + raise ValueError("Invalid escape sequence: " + hxb) + if hxb[0] == "d" and hxb[1].strip('01234567'): + raise ValueError("Invalid escape sequence: " + hxb + + ". Only scalar unicode points are allowed.") + v += unichr(int(hxb, 16)) + v += unicode(hx[len(hxb):]) + return v + + +# Unescape TOML string values. 
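# Worked example of the unescape tables defined next (a standalone sketch;
# the values mirror the _escapes/_escapedchars lists that follow):
mapping = dict(zip(
    ['0', 'b', 'f', 'n', 'r', 't', '"'],
    ['\0', '\b', '\f', '\n', '\r', '\t', '"'],
))
assert mapping['n'] == '\n'   # "\n" inside a TOML basic string -> newline
assert '\\' not in mapping    # literal backslash and \uXXXX are handled separately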
+ +# content after the \ +_escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] +# What it should be replaced by +_escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] +# Used for substitution +_escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) + + +def _unescape(v): + """Unescape characters in a TOML string.""" + i = 0 + backslash = False + while i < len(v): + if backslash: + backslash = False + if v[i] in _escapes: + v = v[:i - 1] + _escape_to_escapedchars[v[i]] + v[i + 1:] + elif v[i] == '\\': + v = v[:i - 1] + v[i:] + elif v[i] == 'u' or v[i] == 'U': + i += 1 + else: + raise ValueError("Reserved escape sequence used") + continue + elif v[i] == '\\': + backslash = True + i += 1 + return v + + +class InlineTableDict(object): + """Sentinel subclass of dict for inline tables.""" + + +class TomlDecoder(object): + + def __init__(self, _dict=dict): + self._dict = _dict + + def get_empty_table(self): + return self._dict() + + def get_empty_inline_table(self): + class DynamicInlineTableDict(self._dict, InlineTableDict): + """Concrete sentinel subclass for inline tables. + It is a subclass of _dict which is passed in dynamically at load + time + + It is also a subclass of InlineTableDict + """ + + return DynamicInlineTableDict() + + def load_inline_object(self, line, currentlevel, multikey=False, + multibackslash=False): + candidate_groups = line[1:-1].split(",") + groups = [] + if len(candidate_groups) == 1 and not candidate_groups[0].strip(): + candidate_groups.pop() + while len(candidate_groups) > 0: + candidate_group = candidate_groups.pop(0) + try: + _, value = candidate_group.split('=', 1) + except ValueError: + raise ValueError("Invalid inline table encountered") + value = value.strip() + if ((value[0] == value[-1] and value[0] in ('"', "'")) or ( + value[0] in '-0123456789' or + value in ('true', 'false') or + (value[0] == "[" and value[-1] == "]") or + (value[0] == '{' and value[-1] == '}'))): + groups.append(candidate_group) + elif len(candidate_groups) > 0: + candidate_groups[0] = (candidate_group + "," + + candidate_groups[0]) + else: + raise ValueError("Invalid inline table value encountered") + for group in groups: + status = self.load_line(group, currentlevel, multikey, + multibackslash) + if status is not None: + break + + def _get_split_on_quotes(self, line): + doublequotesplits = line.split('"') + quoted = False + quotesplits = [] + if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]: + singlequotesplits = doublequotesplits[0].split("'") + doublequotesplits = doublequotesplits[1:] + while len(singlequotesplits) % 2 == 0 and len(doublequotesplits): + singlequotesplits[-1] += '"' + doublequotesplits[0] + doublequotesplits = doublequotesplits[1:] + if "'" in singlequotesplits[-1]: + singlequotesplits = (singlequotesplits[:-1] + + singlequotesplits[-1].split("'")) + quotesplits += singlequotesplits + for doublequotesplit in doublequotesplits: + if quoted: + quotesplits.append(doublequotesplit) + else: + quotesplits += doublequotesplit.split("'") + quoted = not quoted + return quotesplits + + def load_line(self, line, currentlevel, multikey, multibackslash): + i = 1 + quotesplits = self._get_split_on_quotes(line) + quoted = False + for quotesplit in quotesplits: + if not quoted and '=' in quotesplit: + break + i += quotesplit.count('=') + quoted = not quoted + pair = line.split('=', i) + strictly_valid = _strictly_valid_num(pair[-1]) + if _number_with_underscores.match(pair[-1]): + pair[-1] = pair[-1].replace('_', '') + while len(pair[-1]) and (pair[-1][0] != ' ' 
and pair[-1][0] != '\t' and + pair[-1][0] != "'" and pair[-1][0] != '"' and + pair[-1][0] != '[' and pair[-1][0] != '{' and + pair[-1] != 'true' and pair[-1] != 'false'): + try: + float(pair[-1]) + break + except ValueError: + pass + if _load_date(pair[-1]) is not None: + break + i += 1 + prev_val = pair[-1] + pair = line.split('=', i) + if prev_val == pair[-1]: + raise ValueError("Invalid date or number") + if strictly_valid: + strictly_valid = _strictly_valid_num(pair[-1]) + pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()] + if '.' in pair[0]: + if '"' in pair[0] or "'" in pair[0]: + quotesplits = self._get_split_on_quotes(pair[0]) + quoted = False + levels = [] + for quotesplit in quotesplits: + if quoted: + levels.append(quotesplit) + else: + levels += [level.strip() for level in + quotesplit.split('.')] + quoted = not quoted + else: + levels = pair[0].split('.') + while levels[-1] == "": + levels = levels[:-1] + for level in levels[:-1]: + if level == "": + continue + if level not in currentlevel: + currentlevel[level] = self.get_empty_table() + currentlevel = currentlevel[level] + pair[0] = levels[-1].strip() + elif (pair[0][0] == '"' or pair[0][0] == "'") and \ + (pair[0][-1] == pair[0][0]): + pair[0] = pair[0][1:-1] + if len(pair[1]) > 2 and ((pair[1][0] == '"' or pair[1][0] == "'") and + pair[1][1] == pair[1][0] and + pair[1][2] == pair[1][0] and + not (len(pair[1]) > 5 and + pair[1][-1] == pair[1][0] and + pair[1][-2] == pair[1][0] and + pair[1][-3] == pair[1][0])): + k = len(pair[1]) - 1 + while k > -1 and pair[1][k] == '\\': + multibackslash = not multibackslash + k -= 1 + if multibackslash: + multilinestr = pair[1][:-1] + else: + multilinestr = pair[1] + "\n" + multikey = pair[0] + else: + value, vtype = self.load_value(pair[1], strictly_valid) + try: + currentlevel[pair[0]] + raise ValueError("Duplicate keys!") + except TypeError: + raise ValueError("Duplicate keys!") + except KeyError: + if multikey: + return multikey, multilinestr, multibackslash + else: + currentlevel[pair[0]] = value + + def load_value(self, v, strictly_valid=True): + if not v: + raise ValueError("Empty value is invalid") + if v == 'true': + return (True, "bool") + elif v == 'false': + return (False, "bool") + elif v[0] == '"' or v[0] == "'": + quotechar = v[0] + testv = v[1:].split(quotechar) + triplequote = False + triplequotecount = 0 + if len(testv) > 1 and testv[0] == '' and testv[1] == '': + testv = testv[2:] + triplequote = True + closed = False + for tv in testv: + if tv == '': + if triplequote: + triplequotecount += 1 + else: + closed = True + else: + oddbackslash = False + try: + i = -1 + j = tv[i] + while j == '\\': + oddbackslash = not oddbackslash + i -= 1 + j = tv[i] + except IndexError: + pass + if not oddbackslash: + if closed: + raise ValueError("Stuff after closed string. 
WTF?") + else: + if not triplequote or triplequotecount > 1: + closed = True + else: + triplequotecount = 0 + if quotechar == '"': + escapeseqs = v.split('\\')[1:] + backslash = False + for i in escapeseqs: + if i == '': + backslash = not backslash + else: + if i[0] not in _escapes and (i[0] != 'u' and + i[0] != 'U' and + not backslash): + raise ValueError("Reserved escape sequence used") + if backslash: + backslash = False + for prefix in ["\\u", "\\U"]: + if prefix in v: + hexbytes = v.split(prefix) + v = _load_unicode_escapes(hexbytes[0], hexbytes[1:], + prefix) + v = _unescape(v) + if len(v) > 1 and v[1] == quotechar and (len(v) < 3 or + v[1] == v[2]): + v = v[2:-2] + return (v[1:-1], "str") + elif v[0] == '[': + return (self.load_array(v), "array") + elif v[0] == '{': + inline_object = self.get_empty_inline_table() + self.load_inline_object(v, inline_object) + return (inline_object, "inline_object") + elif TIME_RE.match(v): + h, m, s, _, ms = TIME_RE.match(v).groups() + time = datetime.time(int(h), int(m), int(s), int(ms) if ms else 0) + return (time, "time") + else: + parsed_date = _load_date(v) + if parsed_date is not None: + return (parsed_date, "date") + if not strictly_valid: + raise ValueError("Weirdness with leading zeroes or " + "underscores in your number.") + itype = "int" + neg = False + if v[0] == '-': + neg = True + v = v[1:] + elif v[0] == '+': + v = v[1:] + v = v.replace('_', '') + lowerv = v.lower() + if '.' in v or ('x' not in v and ('e' in v or 'E' in v)): + if '.' in v and v.split('.', 1)[1] == '': + raise ValueError("This float is missing digits after " + "the point") + if v[0] not in '0123456789': + raise ValueError("This float doesn't have a leading " + "digit") + v = float(v) + itype = "float" + elif len(lowerv) == 3 and (lowerv == 'inf' or lowerv == 'nan'): + v = float(v) + itype = "float" + if itype == "int": + v = int(v, 0) + if neg: + return (0 - v, itype) + return (v, itype) + + def bounded_string(self, s): + if len(s) == 0: + return True + if s[-1] != s[0]: + return False + i = -2 + backslash = False + while len(s) + i > 0: + if s[i] == "\\": + backslash = not backslash + i -= 1 + else: + break + return not backslash + + def load_array(self, a): + atype = None + retval = [] + a = a.strip() + if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip(): + strarray = False + tmpa = a[1:-1].strip() + if tmpa != '' and (tmpa[0] == '"' or tmpa[0] == "'"): + strarray = True + if not a[1:-1].strip().startswith('{'): + a = a[1:-1].split(',') + else: + # a is an inline object, we must find the matching parenthesis + # to define groups + new_a = [] + start_group_index = 1 + end_group_index = 2 + in_str = False + while end_group_index < len(a[1:]): + if a[end_group_index] == '"' or a[end_group_index] == "'": + if in_str: + backslash_index = end_group_index - 1 + while (backslash_index > -1 and + a[backslash_index] == '\\'): + in_str = not in_str + backslash_index -= 1 + in_str = not in_str + if in_str or a[end_group_index] != '}': + end_group_index += 1 + continue + + # Increase end_group_index by 1 to get the closing bracket + end_group_index += 1 + + new_a.append(a[start_group_index:end_group_index]) + + # The next start index is at least after the closing + # bracket, a closing bracket can be followed by a comma + # since we are in an array. 
+ start_group_index = end_group_index + 1 + while (start_group_index < len(a[1:]) and + a[start_group_index] != '{'): + start_group_index += 1 + end_group_index = start_group_index + 1 + a = new_a + b = 0 + if strarray: + while b < len(a) - 1: + ab = a[b].strip() + while (not self.bounded_string(ab) or + (len(ab) > 2 and + ab[0] == ab[1] == ab[2] and + ab[-2] != ab[0] and + ab[-3] != ab[0])): + a[b] = a[b] + ',' + a[b + 1] + ab = a[b].strip() + if b < len(a) - 2: + a = a[:b + 1] + a[b + 2:] + else: + a = a[:b + 1] + b += 1 + else: + al = list(a[1:-1]) + a = [] + openarr = 0 + j = 0 + for i in _range(len(al)): + if al[i] == '[': + openarr += 1 + elif al[i] == ']': + openarr -= 1 + elif al[i] == ',' and not openarr: + a.append(''.join(al[j:i])) + j = i + 1 + a.append(''.join(al[j:])) + for i in _range(len(a)): + a[i] = a[i].strip() + if a[i] != '': + nval, ntype = self.load_value(a[i]) + if atype: + if ntype != atype: + raise ValueError("Not a homogeneous array") + else: + atype = ntype + retval.append(nval) + return retval diff --git a/pipenv/vendor/toml/encoder.py b/pipenv/vendor/toml/encoder.py new file mode 100644 index 0000000000..79bfd37b3d --- /dev/null +++ b/pipenv/vendor/toml/encoder.py @@ -0,0 +1,250 @@ +import datetime +import re +import sys + +from toml.decoder import InlineTableDict + +if sys.version_info >= (3,): + unicode = str + + +def dump(o, f): + """Writes out dict as toml to a file + + Args: + o: Object to dump into toml + f: File descriptor where the toml should be stored + + Returns: + String containing the toml corresponding to dictionary + + Raises: + TypeError: When anything other than file descriptor is passed + """ + + if not f.write: + raise TypeError("You can only dump an object to a file descriptor") + d = dumps(o) + f.write(d) + return d + + +def dumps(o, encoder=None): + """Stringifies input dict as toml + + Args: + o: Object to dump into toml + + preserve: Boolean parameter. If true, preserve inline tables. + + Returns: + String containing the toml corresponding to dict + """ + + retval = "" + if encoder is None: + encoder = TomlEncoder(o.__class__) + addtoretval, sections = encoder.dump_sections(o, "") + retval += addtoretval + while sections: + newsections = encoder.get_empty_table() + for section in sections: + addtoretval, addtosections = encoder.dump_sections( + sections[section], section) + + if addtoretval or (not addtoretval and not addtosections): + if retval and retval[-2:] != "\n\n": + retval += "\n" + retval += "[" + section + "]\n" + if addtoretval: + retval += addtoretval + for s in addtosections: + newsections[section + "." 
+ s] = addtosections[s] + sections = newsections + return retval + + +def _dump_str(v): + if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): + v = v.decode('utf-8') + v = "%r" % v + if v[0] == 'u': + v = v[1:] + singlequote = v.startswith("'") + if singlequote or v.startswith('"'): + v = v[1:-1] + if singlequote: + v = v.replace("\\'", "'") + v = v.replace('"', '\\"') + v = v.split("\\x") + while len(v) > 1: + i = -1 + if not v[0]: + v = v[1:] + v[0] = v[0].replace("\\\\", "\\") + # No, I don't know why != works and == breaks + joinx = v[0][i] != "\\" + while v[0][:i] and v[0][i] == "\\": + joinx = not joinx + i -= 1 + if joinx: + joiner = "x" + else: + joiner = "u00" + v = [v[0] + joiner + v[1]] + v[2:] + return unicode('"' + v[0] + '"') + + +def _dump_float(v): + return "{0:.16}".format(v).replace("e+0", "e+").replace("e-0", "e-") + + +def _dump_time(v): + utcoffset = v.utcoffset() + if utcoffset is None: + return v.isoformat() + # The TOML norm specifies that it's local time thus we drop the offset + return v.isoformat()[:-6] + + +class TomlEncoder(object): + + def __init__(self, _dict=dict, preserve=False): + self._dict = _dict + self.preserve = preserve + self.dump_funcs = { + str: _dump_str, + unicode: _dump_str, + list: self.dump_list, + bool: lambda v: unicode(v).lower(), + int: lambda v: v, + float: _dump_float, + datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), + datetime.time: _dump_time, + datetime.date: lambda v: v.isoformat() + } + + def get_empty_table(self): + return self._dict() + + def dump_list(self, v): + retval = "[" + for u in v: + retval += " " + unicode(self.dump_value(u)) + "," + retval += "]" + return retval + + def dump_inline_table(self, section): + """Preserve inline table in its compact syntax instead of expanding + into subsection. + + https://github.com/toml-lang/toml#user-content-inline-table + """ + retval = "" + if isinstance(section, dict): + val_list = [] + for k, v in section.items(): + val = self.dump_inline_table(v) + val_list.append(k + " = " + val) + retval += "{ " + ", ".join(val_list) + " }\n" + return retval + else: + return unicode(self.dump_value(section)) + + def dump_value(self, v): + # Lookup function corresponding to v's type + dump_fn = self.dump_funcs.get(type(v)) + if dump_fn is None and hasattr(v, '__iter__'): + dump_fn = self.dump_funcs[list] + # Evaluate function (if it exists) else return v + return dump_fn(v) if dump_fn is not None else self.dump_funcs[str](v) + + def dump_sections(self, o, sup): + retstr = "" + if sup != "" and sup[-1] != ".": + sup += '.' + retdict = self._dict() + arraystr = "" + for section in o: + section = unicode(section) + qsection = section + if not re.match(r'^[A-Za-z0-9_-]+$', section): + if '"' in section: + qsection = "'" + section + "'" + else: + qsection = '"' + section + '"' + if not isinstance(o[section], dict): + arrayoftables = False + if isinstance(o[section], list): + for a in o[section]: + if isinstance(a, dict): + arrayoftables = True + if arrayoftables: + for a in o[section]: + arraytabstr = "\n" + arraystr += "[[" + sup + qsection + "]]\n" + s, d = self.dump_sections(a, sup + qsection) + if s: + if s[0] == "[": + arraytabstr += s + else: + arraystr += s + while d: + newd = self._dict() + for dsec in d: + s1, d1 = self.dump_sections(d[dsec], sup + + qsection + "." + + dsec) + if s1: + arraytabstr += ("[" + sup + qsection + + "." + dsec + "]\n") + arraytabstr += s1 + for s1 in d1: + newd[dsec + "." 
+ s1] = d1[s1] + d = newd + arraystr += arraytabstr + else: + if o[section] is not None: + retstr += (qsection + " = " + + unicode(self.dump_value(o[section])) + '\n') + elif self.preserve and isinstance(o[section], InlineTableDict): + retstr += (qsection + " = " + + self.dump_inline_table(o[section])) + else: + retdict[qsection] = o[section] + retstr += arraystr + return (retstr, retdict) + + +class TomlPreserveInlineDictEncoder(TomlEncoder): + + def __init__(self, _dict=dict): + super(TomlPreserveInlineDictEncoder, self).__init__(_dict, True) + + +class TomlArraySeparatorEncoder(TomlEncoder): + + def __init__(self, _dict=dict, preserve=False, separator=","): + super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) + if separator.strip() == "": + separator = "," + separator + elif separator.strip(' \t\n\r,'): + raise ValueError("Invalid separator for arrays") + self.separator = separator + + def dump_list(self, v): + t = [] + retval = "[" + for u in v: + t.append(self.dump_value(u)) + while t != []: + s = [] + for u in t: + if isinstance(u, list): + for r in u: + s.append(r) + else: + retval += " " + unicode(u) + self.separator + t = s + retval += "]" + return retval diff --git a/pipenv/vendor/toml/ordered.py b/pipenv/vendor/toml/ordered.py new file mode 100644 index 0000000000..9c20c41a1b --- /dev/null +++ b/pipenv/vendor/toml/ordered.py @@ -0,0 +1,15 @@ +from collections import OrderedDict +from toml import TomlEncoder +from toml import TomlDecoder + + +class TomlOrderedDecoder(TomlDecoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) + + +class TomlOrderedEncoder(TomlEncoder): + + def __init__(self): + super(self.__class__, self).__init__(_dict=OrderedDict) diff --git a/pipenv/vendor/toml/tz.py b/pipenv/vendor/toml/tz.py new file mode 100644 index 0000000000..93c3c8ad26 --- /dev/null +++ b/pipenv/vendor/toml/tz.py @@ -0,0 +1,21 @@ +from datetime import tzinfo, timedelta + + +class TomlTz(tzinfo): + def __init__(self, toml_offset): + if toml_offset == "Z": + self._raw_offset = "+00:00" + else: + self._raw_offset = toml_offset + self._sign = -1 if self._raw_offset[0] == '-' else 1 + self._hours = int(self._raw_offset[1:3]) + self._minutes = int(self._raw_offset[4:6]) + + def tzname(self, dt): + return "UTC" + self._raw_offset + + def utcoffset(self, dt): + return self._sign * timedelta(hours=self._hours, minutes=self._minutes) + + def dst(self, dt): + return timedelta(0) diff --git a/pipenv/vendor/tomlkit/__init__.py b/pipenv/vendor/tomlkit/__init__.py index 45de35fdb6..23d4ef7461 100644 --- a/pipenv/vendor/tomlkit/__init__.py +++ b/pipenv/vendor/tomlkit/__init__.py @@ -22,4 +22,4 @@ from .api import ws -__version__ = "0.4.2" +__version__ = "0.4.4" diff --git a/pipenv/vendor/tomlkit/_compat.py b/pipenv/vendor/tomlkit/_compat.py index 26296ff52f..f94bb10e26 100644 --- a/pipenv/vendor/tomlkit/_compat.py +++ b/pipenv/vendor/tomlkit/_compat.py @@ -162,16 +162,3 @@ def decode(string, encodings=None): pass return string.decode(encodings[0], errors="ignore") - - -_escaped = {"b": "\b", "t": "\t", "n": "\n", "f": "\f", "r": "\r", '"': '"', "\\": "\\"} -_escapable = re.compile(r"(? 
Union[datetime, date, time] return time(hour, minute, second, microsecond) raise ValueError("Invalid RFC 339 string") + + +_escaped = {"b": "\b", "t": "\t", "n": "\n", "f": "\f", "r": "\r", '"': '"', "\\": "\\"} +_escapes = {v: k for k, v in _escaped.items()} + + +def escape_string(s): + s = decode(s) + + res = [] + start = 0 + + def flush(): + if start != i: + res.append(s[start:i]) + + return i + 1 + + i = 0 + while i < len(s): + c = s[i] + if c in '"\\\n\r\t\b\f': + start = flush() + res.append("\\" + _escapes[c]) + elif ord(c) < 0x20: + start = flush() + res.append("\\u%04x" % ord(c)) + i += 1 + + flush() + + return "".join(res) diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index a7876ff11c..bb3696d984 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -137,9 +137,9 @@ def append(self, key, item): # type: (Union[Key, str], Item) -> Container is_table = isinstance(item, (Table, AoT)) if key is not None and self._body and not self._parsed: # If there is already at least one table in the current container - # an the given item is not a table, we need to find the last + # and the given item is not a table, we need to find the last # item that is not a table and insert after it - # If not such item exists, insert at the top of the table + # If no such item exists, insert at the top of the table key_after = None idx = 0 for k, v in self._body: @@ -333,7 +333,7 @@ def _render_table( if table.is_aot_element(): open_, close = "[[", "]]" - cur += "{}{}{}{}{}{}{}".format( + cur += "{}{}{}{}{}{}{}{}".format( table.trivia.indent, open_, decode(_key), @@ -341,6 +341,7 @@ def _render_table( table.trivia.comment_ws, decode(table.trivia.comment), table.trivia.trail, + "\n" if "\n" not in table.trivia.trail and len(table.value) > 0 else "", ) for k, v in table.value.body: @@ -450,6 +451,10 @@ def items(self): # type: () -> Generator[Item] yield k, v + def update(self, other): # type: (Dict) -> None + for k, v in other.items(): + self[k] = v + def __contains__(self, key): # type: (Union[Key, str]) -> bool if not isinstance(key, Key): key = Key(key) diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 8807f4b3bc..26f24701d0 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -15,6 +15,7 @@ from ._compat import PY2 from ._compat import decode from ._compat import unicode +from ._utils import escape_string def item(value, _parent=None): @@ -60,7 +61,7 @@ def item(value, _parent=None): return a elif isinstance(value, (str, unicode)): - escaped = decode(value).replace('"', '\\"').replace("\\\\", "\\") + escaped = escape_string(value) return String(StringType.SLB, value, escaped, Trivia()) elif isinstance(value, datetime): @@ -751,6 +752,10 @@ def items(self): # type: () -> Generator[Item] for k, v in self._value.items(): yield k, v + def update(self, other): # type: (Dict) -> None + for k, v in other.items(): + self[k] = v + def __contains__(self, key): # type: (Union[Key, str]) -> bool return key in self._value @@ -758,7 +763,26 @@ def __getitem__(self, key): # type: (Union[Key, str]) -> Item return self._value[key] def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None - self.append(key, value) + if not isinstance(value, Item): + value = item(value) + + self._value[key] = value + + if key is not None: + super(Table, self).__setitem__(key, value) + + m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) + if not m: + return + + indent = m.group(1) 
+ + if not isinstance(value, Whitespace): + m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent) + if not m: + value.trivia.indent = indent + else: + value.trivia.indent = m.group(1) + indent + m.group(2) def __delitem__(self, key): # type: (Union[Key, str]) -> None self.remove(key) @@ -862,6 +886,10 @@ def items(self): # type: () -> Generator[Item] for k, v in self._value.items(): yield k, v + def update(self, other): # type: (Dict) -> None + for k, v in other.items(): + self[k] = v + def __contains__(self, key): # type: (Union[Key, str]) -> bool return key in self._value @@ -869,7 +897,26 @@ def __getitem__(self, key): # type: (Union[Key, str]) -> Item return self._value[key] def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None - self.append(key, value) + if not isinstance(value, Item): + value = item(value) + + self._value[key] = value + + if key is not None: + super(InlineTable, self).__setitem__(key, value) + + m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent) + if not m: + return + + indent = m.group(1) + + if not isinstance(value, Whitespace): + m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent) + if not m: + value.trivia.indent = indent + else: + value.trivia.indent = m.group(1) + indent + m.group(2) def __delitem__(self, key): # type: (Union[Key, str]) -> None self.remove(key) diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index 3d4984d126..45c8ee8cbb 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py @@ -11,6 +11,7 @@ from ._compat import PY2 from ._compat import chr from ._compat import decode +from ._utils import _escaped from ._utils import parse_rfc3339 from .container import Container from .exceptions import EmptyKeyError @@ -286,19 +287,20 @@ def _parse_item(self): # type: () -> Optional[Tuple[Optional[Key], Item]] def _save_idx(self): # type: () -> Tuple[Iterator, int, str] if PY2: - return itertools.tee(self._chars)[1], self._idx, self._current + # Python 2.7 does not allow to directly copy + # an iterator, so we have to make tees of the original + # chars iterator. + chars1, chars2 = itertools.tee(self._chars) + + # We can no longer use the original chars iterator. 
+ self._chars = chars1 + + return chars2, self._idx, self._current return copy(self._chars), self._idx, self._current def _restore_idx(self, chars, idx, current): # type: (Iterator, int, str) -> None - if PY2: - self._chars = iter( - [(i + idx, TOMLChar(c)) for i, c in enumerate(self._src[idx:])] - ) - next(self._chars) - else: - self._chars = chars - + self._chars = chars self._idx = idx self._current = current @@ -736,15 +738,6 @@ def _parse_string(self, delim): # type: (str) -> Item return String(str_type, value, val, Trivia()) else: - escape_vals = { - "b": "\b", - "t": "\t", - "n": "\n", - "f": "\f", - "r": "\r", - "\\": "\\", - '"': '"', - } if previous == "\\" and self._current.is_ws() and multiline: while self._current.is_ws(): previous = self._current @@ -768,10 +761,10 @@ def _parse_string(self, delim): # type: (str) -> Item raise self.parse_error(UnexpectedEofError) continue - elif self._current in escape_vals and not escaped: + elif self._current in _escaped and not escaped: if not str_type.is_literal(): value = value[:-1] - value += escape_vals[self._current] + value += _escaped[self._current] else: value += self._current elif self._current in {"u", "U"} and not escaped: diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index 1e843dee7c..4be472ab25 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -13,7 +13,7 @@ from .path import mkdir_p, rmtree -__version__ = '0.1.5' +__version__ = '0.1.6' __all__ = [ diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 98ebe502f5..723bb11748 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -13,15 +13,20 @@ import six -from yaspin import yaspin, spinners - from .cmdparse import Script from .compat import Path, fs_str, partialmethod __all__ = [ - "shell_escape", "unnest", "dedup", "run", "load_path", "partialclass", "to_text", - "to_bytes", "locale_encoding" + "shell_escape", + "unnest", + "dedup", + "run", + "load_path", + "partialclass", + "to_text", + "to_bytes", + "locale_encoding", ] @@ -72,15 +77,17 @@ def dedup(iterable): def _spawn_subprocess(script, env={}, block=True, cwd=None): from distutils.spawn import find_executable + command = find_executable(script.command) options = { "env": env, "universal_newlines": True, "stdout": subprocess.PIPE, "stderr": subprocess.PIPE if block else subprocess.STDOUT, - "stdin": None if block else subprocess.PIPE, - "shell": False + "shell": False, } + if not block: + options["stdin"] = subprocess.PIPE if cwd: options["cwd"] = cwd # Command not found, maybe this is a shell built-in? 
@@ -102,17 +109,26 @@ def _spawn_subprocess(script, env={}, block=True, cwd=None): return subprocess.Popen(script.cmdify(), **options) -def _create_subprocess(cmd, env={}, block=True, return_object=False, cwd=os.curdir, verbose=False, spinner=None): +def _create_subprocess( + cmd, + env={}, + block=True, + return_object=False, + cwd=os.curdir, + verbose=False, + spinner=None, +): try: c = _spawn_subprocess(cmd, env=env, block=block, cwd=cwd) except Exception as exc: - print( - "Error %s while executing command %s", exc, " ".join(cmd._parts) - ) + print("Error %s while executing command %s", exc, " ".join(cmd._parts)) raise if not block: c.stdin.close() output = [] + spinner_orig_text = "" + if spinner: + spinner_orig_text = spinner.text if c.stdout is not None: while True: line = to_text(c.stdout.readline()) @@ -120,17 +136,22 @@ def _create_subprocess(cmd, env={}, block=True, return_object=False, cwd=os.curd break line = line.rstrip() output.append(line) + display_line = line + if len(line) > 200: + display_line = "{0}...".format(line[:200]) if verbose: - print(line + "\n") - elif spinner: - spinner.text = line + spinner.write(display_line) else: + spinner.text = "{0} {1}".format(spinner_orig_text, display_line) continue try: c.wait() finally: if c.stdout: c.stdout.close() + if spinner: + spinner.text = "Complete!" + spinner.ok("✔") c.out = "".join(output) c.err = "" else: @@ -140,7 +161,15 @@ def _create_subprocess(cmd, env={}, block=True, return_object=False, cwd=os.curd return c -def run(cmd, env={}, return_object=False, block=True, cwd=None, verbose=False, nospin=False,): +def run( + cmd, + env={}, + return_object=False, + block=True, + cwd=None, + verbose=False, + nospin=False, +): """Use `subprocess.Popen` to get the output of a command and decode it. :param list cmd: A list representing the command you want to run. @@ -166,17 +195,46 @@ def run(cmd, env={}, return_object=False, block=True, cwd=None, verbose=False, n cmd = [c.encode("utf-8") for c in cmd] if not isinstance(cmd, Script): cmd = Script.parse(cmd) - spinner = yaspin - if nospin: + if nospin is False: + try: + from yaspin import yaspin + from yaspin import spinners + except ImportError: + raise RuntimeError( + "Failed to import spinner! 
Reinstall vistir with command:" + " pip install --upgrade vistir[spinner]" + ) + else: + spinner = yaspin + animation = spinners.Spinners.bouncingBar + else: + @contextmanager - def spinner(spin_type): + def spinner(spin_type, text): class FakeClass(object): - def __init__(self): - self.text = "" - myobj = FakeClass() + def __init__(self, text=""): + self.text = text + + def ok(self, text): + return + + def write(self, text): + print(text) + + myobj = FakeClass(text) yield myobj - with spinner(spinners.Spinners.bouncingBar) as sp: - return _create_subprocess(cmd, env=_env, return_object=return_object, block=block, cwd=cwd, verbose=verbose, spinner=sp) + + animation = None + with spinner(animation, text="Running...") as sp: + return _create_subprocess( + cmd, + env=_env, + return_object=return_object, + block=block, + cwd=cwd, + verbose=verbose, + spinner=sp, + ) def load_path(python): @@ -217,20 +275,18 @@ def partialclass(cls, *args, **kwargs): {'url': 'https://pypi.org/simple', 'verify_ssl': True, 'name': 'pypi'} """ - name_attrs = [n for n in (getattr(cls, name, str(cls)) for name in ("__name__", "__qualname__")) if n is not None] + name_attrs = [ + n + for n in (getattr(cls, name, str(cls)) for name in ("__name__", "__qualname__")) + if n is not None + ] name_attrs = name_attrs[0] type_ = type( - name_attrs, - (cls,), - { - "__init__": partialmethod(cls.__init__, *args, **kwargs), - } + name_attrs, (cls,), {"__init__": partialmethod(cls.__init__, *args, **kwargs)} ) # Swiped from attrs.make_class try: - type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__", - ) + type_.__module__ = sys._getframe(1).f_globals.get("__name__", "__main__") except (AttributeError, ValueError): pass return type_ @@ -258,7 +314,7 @@ def to_bytes(string, encoding="utf-8", errors="ignore"): if encoding.lower() == "utf-8": return string else: - return string.decode('utf-8').encode(encoding, errors) + return string.decode("utf-8").encode(encoding, errors) elif isinstance(string, memoryview): return bytes(string) elif not isinstance(string, six.string_types): @@ -269,7 +325,7 @@ def to_bytes(string, encoding="utf-8", errors="ignore"): return bytes(string) except UnicodeEncodeError: if isinstance(string, Exception): - return b' '.join(to_bytes(arg, encoding, errors) for arg in string) + return b" ".join(to_bytes(arg, encoding, errors) for arg in string) return six.text_type(string).encode(encoding, errors) else: return string.encode(encoding, errors) @@ -300,18 +356,18 @@ def to_text(string, encoding="utf-8", errors=None): string = six.text_type(string, encoding, errors) else: string = six.text_type(string) - elif hasattr(string, '__unicode__'): + elif hasattr(string, "__unicode__"): string = six.text_type(string) else: string = six.text_type(bytes(string), encoding, errors) else: string = string.decode(encoding, errors) except UnicodeDecodeError as e: - string = ' '.join(to_text(arg, encoding, errors) for arg in string) + string = " ".join(to_text(arg, encoding, errors) for arg in string) return string try: - locale_encoding = locale.getdefaultencoding()[1] or 'ascii' + locale_encoding = locale.getdefaultencoding()[1] or "ascii" except Exception: - locale_encoding = 'ascii' + locale_encoding = "ascii" diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 3c5d8d9987..3198c2d40d 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -627,8 +627,8 @@ def main(ctx, package=None): vendor_file = _vendor_dir / 'vendor.txt' 
vendor_file.write_bytes(vendor_src_file.read_bytes()) download_licenses(ctx, _vendor_dir) - from .vendor_passa import vendor_passa - log("Vendoring passa...") - vendor_passa(ctx) + # from .vendor_passa import vendor_passa + # log("Vendoring passa...") + # vendor_passa(ctx) # update_safety(ctx) log('Revendoring complete') diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 90c420b989..5e219609f4 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -96,11 +96,11 @@ index bf69803..eb20560 100644 + SafeFileCache ) +os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip" -+from pip_shims.shims import pip_import, VcsSupport, WheelCache ++from pip_shims.shims import do_import, VcsSupport, WheelCache +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet, Specifier +from packaging.markers import Op, Value, Variable, Marker -+InstallationError = pip_import("InstallationError", "exceptions.InstallationError", "7.0", "9999") ++InstallationError = do_import(("InstallationError", "exceptions.InstallationError", "7.0", "9999")) +from notpip._internal.resolve import Resolver as PipResolver + From 0f9ce3e528d0bdbb82a2406ba3b80ba9bfca0797 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Oct 2018 14:29:22 -0400 Subject: [PATCH 16/35] Re-updated pip patch to ignore compatibility when sorting Signed-off-by: Dan Ryan --- pipenv/patched/notpip/_internal/index.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/patched/notpip/_internal/index.py b/pipenv/patched/notpip/_internal/index.py index 98102f3f07..793dd1cb93 100644 --- a/pipenv/patched/notpip/_internal/index.py +++ b/pipenv/patched/notpip/_internal/index.py @@ -294,7 +294,7 @@ def sort_path(path): return files, urls - def _candidate_sort_key(self, candidate, ignore_compatibility=False): + def _candidate_sort_key(self, candidate, ignore_compatibility=True): """ Function used to generate link sort key for link tuples. The greater the return value, the more preferred it is. 
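For context on why _candidate_sort_key now defaults to ignoring compatibility: pipenv locks dependencies for platforms other than the one running the resolver, so candidate links have to be ranked by version even when their wheels target a foreign interpreter or OS. The following is a minimal sketch of that idea only, not pip's actual implementation; Candidate and SUPPORTED_TAGS are hypothetical stand-ins for pip's Link objects and PEP 425 tag sets.

from collections import namedtuple

# Hypothetical stand-in for pip's candidate links.
Candidate = namedtuple("Candidate", ["version", "is_wheel", "tags"])

# Hypothetical stand-in for the running interpreter's PEP 425 tags.
SUPPORTED_TAGS = {"cp37-cp37m-manylinux1_x86_64", "py3-none-any"}


def candidate_sort_key(candidate, ignore_compatibility=True):
    support_num = 1
    if candidate.is_wheel and not ignore_compatibility:
        # Wheels built for another interpreter/platform sort last.
        if not candidate.tags & SUPPORTED_TAGS:
            support_num = -1
    return (candidate.version, support_num)


candidates = [
    Candidate((1, 2), True, {"cp27-cp27m-win32"}),  # foreign-platform wheel
    Candidate((1, 1), False, frozenset()),          # sdist
]
# With ignore_compatibility=True the newer foreign wheel still ranks first,
# so the lockfile is not biased toward the machine doing the locking.
print(max(candidates, key=candidate_sort_key).version)  # (1, 2)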
From 121390d83d9d6e299f23f15e960c51eeb2d0212d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 6 Oct 2018 18:50:12 -0400 Subject: [PATCH 17/35] Patch pipdeptree Signed-off-by: Dan Ryan --- pipenv/vendor/pipdeptree.py | 6 +----- .../vendor/pipdeptree-updated-pip18.patch | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 5 deletions(-) create mode 100644 tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py index 7820aa5db4..9cce0325e7 100644 --- a/pipenv/vendor/pipdeptree.py +++ b/pipenv/vendor/pipdeptree.py @@ -13,11 +13,7 @@ except ImportError: from ordereddict import OrderedDict -try: - from pip._internal import get_installed_distributions - from pip._internal.operations.freeze import FrozenRequirement -except ImportError: - from pip import get_installed_distributions, FrozenRequirement +from pipenv.vendor.pip_shims import get_installed_distributions, FrozenRequirement import pkg_resources # inline: diff --git a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch b/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch new file mode 100644 index 0000000000..1e447c6122 --- /dev/null +++ b/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch @@ -0,0 +1,17 @@ +diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py +index 7820aa5d..9cce0325 100644 +--- a/pipenv/vendor/pipdeptree.py ++++ b/pipenv/vendor/pipdeptree.py +@@ -13,11 +13,7 @@ try: + except ImportError: + from ordereddict import OrderedDict + +-try: +- from pip._internal import get_installed_distributions +- from pip._internal.operations.freeze import FrozenRequirement +-except ImportError: +- from pip import get_installed_distributions, FrozenRequirement ++from pipenv.vendor.pip_shims import get_installed_distributions, FrozenRequirement + + import pkg_resources + # inline: From 17e73f5cb930a22b4b83aeb87f30690ef60aad98 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 01:02:23 -0400 Subject: [PATCH 18/35] Update pip_shims Signed-off-by: Dan Ryan --- pipenv/vendor/pip_shims/__init__.py | 2 +- pipenv/vendor/pip_shims/shims.py | 21 ++++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py index 8cb2bca117..1342f79341 100644 --- a/pipenv/vendor/pip_shims/__init__.py +++ b/pipenv/vendor/pip_shims/__init__.py @@ -3,7 +3,7 @@ import sys -__version__ = '0.3.0' +__version__ = '0.3.1' from . import shims diff --git a/pipenv/vendor/pip_shims/shims.py b/pipenv/vendor/pip_shims/shims.py index 656ff7f58c..7b81a60858 100644 --- a/pipenv/vendor/pip_shims/shims.py +++ b/pipenv/vendor/pip_shims/shims.py @@ -32,12 +32,15 @@ def __all__(self): return list(self._locations.keys()) def __init__(self): - from .utils import _parse, get_package, STRING_TYPES - self._parse = _parse - self.get_package = get_package - self.STRING_TYPES = STRING_TYPES + # from .utils import _parse, get_package, STRING_TYPES + from . 
import utils + self.utils = utils + self._parse = utils._parse + self.get_package = utils.get_package + self.STRING_TYPES = utils.STRING_TYPES self._modules = { - "pip": importlib.import_module("pip"), + "pip": importlib.import_module(self.BASE_IMPORT_PATH), + "pip_shims.utils": utils } self.pip_version = getattr(self._modules["pip"], "__version__") self.parsed_pip_version = self._parse(self.pip_version) @@ -85,6 +88,14 @@ def __init__(self): ("cmdoptions.index_group", "7.0.0", "18.0") ), "InstallRequirement": ("req.req_install.InstallRequirement", "7.0.0", "9999"), + "InstallationError": ("exceptions.InstallationError", "7.0.0", "9999"), + "UninstallationError": ("exceptions.UninstallationError", "7.0.0", "9999"), + "DistributionNotFound": ("exceptions.DistributionNotFound", "7.0.0", "9999"), + "RequirementsFileParseError": ("exceptions.RequirementsFileParseError", "7.0.0", "9999"), + "BestVersionAlreadyInstalled": ("exceptions.BestVersionAlreadyInstalled", "7.0.0", "9999"), + "BadCommand": ("exceptions.BadCommand", "7.0.0", "9999"), + "CommandError": ("exceptions.CommandError", "7.0.0", "9999"), + "PreviousBuildDirError": ("exceptions.PreviousBuildDirError", "7.0.0", "9999"), "install_req_from_editable": ( ("req.constructors.install_req_from_editable", "18.1", "9999"), ("req.req_install.InstallRequirement.from_editable", "7.0.0", "18.0") From 3f8824fad7ccb63bbd6c3b4dde3cc54bdbd8b774 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 01:02:47 -0400 Subject: [PATCH 19/35] Add deprecation warning ignores to pytest.ini Signed-off-by: Dan Ryan --- pytest.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pytest.ini b/pytest.ini index eee579da2c..48dfab025a 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,3 +3,6 @@ addopts = -ra -n auto testpaths = tests/ ; Add vendor and patched in addition to the default list of ignored dirs norecursedirs = .* build dist CVS _darcs {arch} *.egg vendor patched news tasks docs +filterwarnings = + ignore::DeprecationWarning + ignore::PendingDeprecationWarning From 6c8ad874809d930f43595bfbec0adefba29b0dff Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 01:05:10 -0400 Subject: [PATCH 20/35] Update pip_shims self-import module replacement logic with new names Signed-off-by: Dan Ryan --- pipenv/vendor/pip_shims/__init__.py | 4 ++-- .../patches/vendor/pip_shims_module_names.patch | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 tasks/vendoring/patches/vendor/pip_shims_module_names.patch diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py index 1342f79341..70fb0d5870 100644 --- a/pipenv/vendor/pip_shims/__init__.py +++ b/pipenv/vendor/pip_shims/__init__.py @@ -8,10 +8,10 @@ from . import shims -old_module = sys.modules["pip_shims"] +old_module = sys.modules[__name__] -module = sys.modules["pip_shims"] = shims._new() +module = sys.modules[__name__] = shims._new() module.shims = shims module.__dict__.update({ '__file__': __file__, diff --git a/tasks/vendoring/patches/vendor/pip_shims_module_names.patch b/tasks/vendoring/patches/vendor/pip_shims_module_names.patch new file mode 100644 index 0000000000..8658dcaaf0 --- /dev/null +++ b/tasks/vendoring/patches/vendor/pip_shims_module_names.patch @@ -0,0 +1,17 @@ +diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py +index 1342f793..70fb0d58 100644 +--- a/pipenv/vendor/pip_shims/__init__.py ++++ b/pipenv/vendor/pip_shims/__init__.py +@@ -8,10 +8,10 @@ __version__ = '0.3.1' + from . 
import shims + + +-old_module = sys.modules["pip_shims"] ++old_module = sys.modules[__name__] + + +-module = sys.modules["pip_shims"] = shims._new() ++module = sys.modules[__name__] = shims._new() + module.shims = shims + module.__dict__.update({ + '__file__': __file__, From 339c50e3fb6cb0d58a4ba2ebdf76c0bc470e610b Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 01:06:28 -0400 Subject: [PATCH 21/35] update vendored shims version Signed-off-by: Dan Ryan --- pipenv/vendor/vendor.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index b9f091e193..cac0801dba 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -42,7 +42,7 @@ shutilwhich==1.1.0 toml==0.10.0 cached-property==1.4.3 vistir==0.1.6 -pip-shims==0.3.0 +pip-shims==0.3.1 ptyprocess==0.6.0 enum34==1.1.6 yaspin==0.14.0 From 742988169333ba14a4b2b6f527a604d6f0bc9e09 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 01:21:05 -0400 Subject: [PATCH 22/35] Add a proper parser for index urls internally - Fix invocation of vcs hash retrieval - Add error handling for pip url retrieval Signed-off-by: Dan Ryan --- pipenv/core.py | 29 +++++++------ pipenv/patched/notpip/_internal/index.py | 5 ++- pipenv/project.py | 18 +++++++- pipenv/utils.py | 46 ++++++++++++++++++--- tasks/vendoring/patches/patched/pip18.patch | 16 +++++++ 5 files changed, 93 insertions(+), 21 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 1bfb2e1c08..84834d809d 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -39,6 +39,7 @@ rmtree, fs_str, clean_resolved_dep, + parse_indexes ) from ._compat import TemporaryDirectory, Path from . import environments, pep508checker, progress @@ -215,10 +216,14 @@ def import_requirements(r=None, dev=False): with open(r, "r") as f: contents = f.read() indexes = [] + trusted_hosts = [] # Find and add extra indexes. for line in contents.split("\n"): - if line.startswith(("-i ", "--index ", "--index-url ")): - indexes.append(line.split()[1]) + line_indexes, _trusted_hosts, _ = parse_indexes(line.strip()) + indexes.extend(line_indexes) + trusted_hosts.extend(_trusted_hosts) + indexes = sorted(set(indexes)) + trusted_hosts = sorted(set(trusted_hosts)) reqs = [f for f in parse_requirements(r, session=pip_requests)] for package in reqs: if package.name not in BAD_PACKAGES: @@ -232,7 +237,8 @@ def import_requirements(r=None, dev=False): else: project.add_package_to_pipfile(str(package.req), dev=dev) for index in indexes: - project.add_index_to_pipfile(index) + trusted = index in trusted_hosts + project.add_index_to_pipfile(index, trusted_host=trusted) project.recase_pipfile() @@ -741,17 +747,14 @@ def cleanup_procs(procs, concurrent): for dep, ignore_hash, block in deps_list_bar: if len(procs) < PIPENV_MAX_SUBPROCESS: # Use a specific index, if specified. 
+ indexes, trusted_hosts, dep = parse_indexes(dep) index = None - if " --index" in dep: - dep, _, index = dep.partition(" --index") - index = index.lstrip("=") - elif " -i " in dep: - dep, _, index = dep.partition(" -i ") extra_indexes = [] - if "--extra-index-url" in dep: - split_dep = dep.split("--extra-index-url") - dep, extra_indexes = split_dep[0], split_dep[1:] - dep = Requirement.from_line(dep) + if indexes: + index = indexes[0] + if len(indexes) > 0: + extra_indexes = indexes[1:] + dep = Requirement.from_line(" ".join(dep)) if index: _index = None try: @@ -1335,7 +1338,7 @@ def pip_install( sources = [{"url": index}] if extra_indexes: if isinstance(extra_indexes, six.string_types): - extra_indexes = [extra_indexes] + extra_indexes = [extra_indexes,] for idx in extra_indexes: try: extra_src = project.find_source(idx).get("url") diff --git a/pipenv/patched/notpip/_internal/index.py b/pipenv/patched/notpip/_internal/index.py index 793dd1cb93..426880e987 100644 --- a/pipenv/patched/notpip/_internal/index.py +++ b/pipenv/patched/notpip/_internal/index.py @@ -477,7 +477,10 @@ def find_all_candidates(self, project_name): page_versions = [] for page in self._get_pages(url_locations, project_name): - logger.debug('Analyzing links from page %s', page.url) + try: + logger.debug('Analyzing links from page %s', page.url) + except AttributeError: + continue with indent_log(): page_versions.extend( self._package_versions(page.links, search) diff --git a/pipenv/project.py b/pipenv/project.py index f9decbf641..b02e94d8c6 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -792,11 +792,25 @@ def add_package_to_pipfile(self, package, dev=False): # Write Pipfile. self.write_toml(p) - def add_index_to_pipfile(self, index): + def add_index_to_pipfile(self, index, verify_ssl=True): """Adds a given index to the Pipfile.""" # Read and append Pipfile. p = self.parsed_pipfile - source = {"url": index, "verify_ssl": True} + try: + self.get_source(url=index) + except SourceNotFound: + source = {"url": index, "verify_ssl": verify_ssl} + else: + return + name, _, tld_guess = six.moves.urllib.parse.urlsplit(index).netloc.rpartition(".") + src_name = name.replace(".", "") + try: + self.get_source(name=src_name) + except SourceNotFound: + source[name] = src_name + else: + from random import randint + source[name] = "{0}-{1}".format(src_name, randint(1, 1000)) # Add the package to the group. 
if "source" not in p: p["source"] = [source] diff --git a/pipenv/utils.py b/pipenv/utils.py index 0e1f851d1d..790aa42b31 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -250,8 +250,10 @@ class PipCommand(basecommand.Command): if not dep: continue url = None - if " -i " in dep: - dep, url = dep.split(" -i ") + indexes, trusted_hosts, remainder = parse_indexes(dep) + if indexes: + url = indexes[0] + dep = " ".join(remainder) req = Requirement.from_line(dep) # extra_constraints = [] @@ -524,15 +526,17 @@ def is_pinned(val): return isinstance(val, six.string_types) and val.startswith("==") -def convert_deps_to_pip(deps, project=None, r=True, include_index=False): +def convert_deps_to_pip(deps, project=None, r=True, include_index=True): """"Converts a Pipfile-formatted dependency to a pip-formatted one.""" from ._compat import NamedTemporaryFile from .vendor.requirementslib import Requirement dependencies = [] for dep_name, dep in deps.items(): - indexes = project.sources if hasattr(project, "sources") else None + indexes = project.sources if hasattr(project, "sources") else [] new_dep = Requirement.from_pipfile(dep_name, dep) + if new_dep.index: + include_index = True req = new_dep.as_line(sources=indexes if include_index else None).strip() dependencies.append(req) if not r: @@ -1169,7 +1173,7 @@ def get_vcs_deps( name = requirement.normalized_name commit_hash = None if requirement.is_vcs: - with requirement.req.locked_vcs_repo(src_dir=src_dir) as repo: + with locked_repository(requirement) as repo: commit_hash = repo.get_commit_hash() lockfile[name] = requirement.pipfile_entry[1] lockfile[name]['ref'] = commit_hash @@ -1307,6 +1311,22 @@ def is_virtual_environment(path): return False +@contextmanager +def locked_repository(requirement): + from pipenv.vendor.vistir.path import create_tracked_tempdir + src_dir = create_tracked_tempdir(prefix="pipenv-src") + if not requirement.is_vcs: + return + original_base = os.environ.pop("PIP_SHIMS_BASE_MODULE", None) + os.environ["PIP_SHIMS_BASE_MODULE"] = fs_str("pipenv.patched.notpip") + try: + with requirement.req.locked_vcs_repo(src_dir=src_dir) as repo: + yield repo + finally: + if original_base: + os.environ["PIP_SHIMS_BASE_MODULE"] = original_base + + @contextmanager def chdir(path): """Context manager to change working directories.""" @@ -1326,3 +1346,19 @@ def chdir(path): def looks_like_dir(path): seps = (sep for sep in (os.path.sep, os.path.altsep) if sep is not None) return any(sep in path for sep in seps) + + +def parse_indexes(line): + from argparse import ArgumentParser + parser = ArgumentParser("indexes") + parser.add_argument("--index", "-i", "--index-url", metavar="index_url", + action="store", nargs="?",) + parser.add_argument("--extra-index-url", "--extra-index", metavar="extra_indexes", + action="append") + parser.add_argument("--trusted-host", metavar="trusted_hosts", action="append") + args, remainder = parser.parse_known_args(line.split()) + index = [] if not args.index else [args.index,] + extra_indexes = [] if not args.extra_index_url else args.extra_index_url + indexes = index + extra_indexes + trusted_hosts = args.trusted_host if args.trusted_host else [] + return indexes, trusted_hosts, remainder diff --git a/tasks/vendoring/patches/patched/pip18.patch b/tasks/vendoring/patches/patched/pip18.patch index 60d06dcf6b..558c28e8f8 100644 --- a/tasks/vendoring/patches/patched/pip18.patch +++ b/tasks/vendoring/patches/patched/pip18.patch @@ -493,3 +493,19 @@ index fcf9d3d3..d8aff848 100644 SETUPTOOLS_SHIM % req.setup_py ] + 
list(self.global_options) +diff --git a/pipenv/patched/pip/_internal/index.py b/pipenv/patched/pip/_internal/index.py +index 793dd1cb..426880e9 100644 +--- a/pipenv/patched/pip/_internal/index.py ++++ b/pipenv/patched/pip/_internal/index.py +@@ -477,7 +477,10 @@ class PackageFinder(object): + + page_versions = [] + for page in self._get_pages(url_locations, project_name): +- logger.debug('Analyzing links from page %s', page.url) ++ try: ++ logger.debug('Analyzing links from page %s', page.url) ++ except AttributeError: ++ continue + with indent_log(): + page_versions.extend( + self._package_versions(page.links, search) From 6e042b3b62a026e452f875323887497362b4da09 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 01:41:21 -0400 Subject: [PATCH 23/35] fix piptools patch to account for None in hash lookups Signed-off-by: Dan Ryan --- pipenv/patched/piptools/repositories/pypi.py | 10 +++++----- tasks/vendoring/patches/patched/piptools.patch | 9 +++++---- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index 2b156073a8..76e1f8849d 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -68,10 +68,10 @@ def get_hash(self, location): hash_value = self.get(new_location.url) if not hash_value: hash_value = self._get_file_hash(new_location) if not new_location.url.startswith("ssh") else None - hash_value = hash_value.encode('utf8') + hash_value = hash_value.encode('utf8') if hash_value else None if can_hash: self.set(new_location.url, hash_value) - return hash_value.decode('utf8') + return hash_value.decode('utf8') if hash_value else None def _get_file_hash(self, location): h = hashlib.new(FAVORITE_HASH) @@ -277,7 +277,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= 'isolated': False, 'wheel_cache': wheel_cache, 'use_user_site': False, - 'ignore_compatibility': False + 'ignore_compatibility': True } resolver = None preparer = None @@ -456,8 +456,8 @@ def get_hashes(self, ireq): # matching_candidates = candidates_by_version[matching_versions[0]] return { - self._hash_cache.get_hash(candidate.location) - for candidate in matching_candidates + h for h in map(lambda c: self._hash_cache.get_hash(c.location), + matching_candidates) if h is not None } @contextmanager diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 5e219609f4..152420af1a 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -152,10 +152,10 @@ index bf69803..eb20560 100644 + hash_value = self.get(new_location.url) + if not hash_value: + hash_value = self._get_file_hash(new_location) if not new_location.url.startswith("ssh") else None -+ hash_value = hash_value.encode('utf8') ++ hash_value = hash_value.encode('utf8') if hash_value else None + if can_hash: + self.set(new_location.url, hash_value) -+ return hash_value.decode('utf8') ++ return hash_value.decode('utf8') if hash_value else None + + def _get_file_hash(self, location): + h = hashlib.new(FAVORITE_HASH) @@ -473,8 +473,9 @@ index bf69803..eb20560 100644 return { - self._get_file_hash(candidate.location) -+ self._hash_cache.get_hash(candidate.location) - for candidate in matching_candidates +- for candidate in matching_candidates ++ h for h in map(lambda c: self._hash_cache.get_hash(c.location), ++ matching_candidates) if h is not None } - def 
_get_file_hash(self, location): From 71ed41300d1cebbacbba6191e877fcc11ec2c5d7 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 04:57:25 -0400 Subject: [PATCH 24/35] Remove extra piptools license - Fixes 2678 Signed-off-by: Dan Ryan --- pipenv/patched/piptools/LICENSE.txt | 17 ----------------- 1 file changed, 17 deletions(-) delete mode 100644 pipenv/patched/piptools/LICENSE.txt diff --git a/pipenv/patched/piptools/LICENSE.txt b/pipenv/patched/piptools/LICENSE.txt deleted file mode 100644 index 89de354795..0000000000 --- a/pipenv/patched/piptools/LICENSE.txt +++ /dev/null @@ -1,17 +0,0 @@ -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. From bf64e7b55a77846fcfd68be3289d6499b7cdf197 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 05:21:02 -0400 Subject: [PATCH 25/35] Add news fragments and final patch for all fixes - Fixes #2499 - Fixes #2529 - Fixes #2589 - Fixes #2666 - Fixes #2767 - Fixes #2785 - Fixes #2795 - Fixes #2801 - Fixes #2802 - Fixes #2824 - Fixes #2862 - Fixes #2867 - Fixes #2879 - Fixes #2880 - Fixes #2894 - Fixes #2902 - Fixes #2924 Signed-off-by: Dan Ryan --- news/2499.bugfix | 1 + news/2529.bugfix | 1 + news/2589.bugfix | 1 + news/2666.bugfix | 1 + news/2767.bugfix | 1 + news/2785.bugfix | 1 + news/2795.bugfix | 1 + news/2801.bugfix | 1 + news/2802.bugfix | 19 +++++++++++ news/2824.bugfix | 1 + news/2862.bugfix | 1 + news/2867.bugfix | 19 +++++++++++ news/2879.bugfix | 1 + news/2880.bugfix | 19 +++++++++++ news/2894.bugfix | 1 + news/2902.vendor | 19 +++++++++++ news/2924.bugfix | 1 + news/2933.bugfix | 1 + news/2935.vendor | 19 +++++++++++ pipenv/patched/piptools/repositories/pypi.py | 7 ++-- .../vendoring/patches/patched/piptools.patch | 34 +++++++++---------- 21 files changed, 130 insertions(+), 20 deletions(-) create mode 100644 news/2499.bugfix create mode 100644 news/2529.bugfix create mode 100644 news/2589.bugfix create mode 100644 news/2666.bugfix create mode 100644 news/2767.bugfix create mode 100644 news/2785.bugfix create mode 100644 news/2795.bugfix create mode 100644 news/2801.bugfix create mode 100644 news/2802.bugfix create mode 100644 news/2824.bugfix create mode 100644 news/2862.bugfix create mode 100644 news/2867.bugfix create mode 100644 news/2879.bugfix create mode 100644 news/2880.bugfix create mode 100644 news/2894.bugfix create mode 100644 news/2902.vendor create mode 100644 news/2924.bugfix create mode 100644 news/2933.bugfix create mode 100644 news/2935.vendor diff --git a/news/2499.bugfix b/news/2499.bugfix new file mode 100644 index 
0000000000..16224c34ec --- /dev/null +++ b/news/2499.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2529.bugfix b/news/2529.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2529.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2589.bugfix b/news/2589.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2589.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2666.bugfix b/news/2666.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2666.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2767.bugfix b/news/2767.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2767.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2785.bugfix b/news/2785.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2785.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2795.bugfix b/news/2795.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2795.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2801.bugfix b/news/2801.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2801.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2802.bugfix b/news/2802.bugfix new file mode 100644 index 0000000000..e0054d8c41 --- /dev/null +++ b/news/2802.bugfix @@ -0,0 +1,19 @@ +Updated vendored dependencies: + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) + - ``pip 10.0.1 => 18.0`` + - ``click 6.7 => 7.0`` + - ``toml 0.9.4 => 0.10.0`` + - ``pyparsing 2.2.0 => 2.2.2`` + - ``delegator 0.1.0 => 0.1.1`` + - ``attrs 18.1.0 => 18.2.0`` + - ``distlib 0.2.7 => 0.2.8`` + - ``packaging 17.1.0 => 18.0`` + - ``passa 0.2.0 => 0.3.1`` + - ``pip_shims 0.1.2 => 0.3.1`` + - ``plette 0.1.1 => 0.2.2`` + - ``pythonfinder 1.0.2 => 1.1.0`` + - ``pytoml 0.1.18 => 0.1.19`` + - ``requirementslib 1.1.16 => 1.1.17`` + - ``shellingham 1.2.4 => 1.2.6`` + - ``tomlkit 0.4.2 => 0.4.4`` + - ``vistir 0.1.4 => 0.1.6`` diff --git a/news/2824.bugfix b/news/2824.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2824.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2862.bugfix b/news/2862.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2862.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``.
diff --git a/news/2867.bugfix b/news/2867.bugfix new file mode 100644 index 0000000000..e0054d8c41 --- /dev/null +++ b/news/2867.bugfix @@ -0,0 +1,19 @@ +Updated vendored dependencies: + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) + - ``pip 10.0.1 => 18.0`` + - ``click 6.7 => 7.0`` + - ``toml 0.9.4 => 0.10.0`` + - ``pyparsing 2.2.0 => 2.2.2`` + - ``delegator 0.1.0 => 0.1.1`` + - ``attrs 18.1.0 => 18.2.0`` + - ``distlib 0.2.7 => 0.2.8`` + - ``packaging 17.1.0 => 18.0`` + - ``passa 0.2.0 => 0.3.1`` + - ``pip_shims 0.1.2 => 0.3.1`` + - ``plette 0.1.1 => 0.2.2`` + - ``pythonfinder 1.0.2 => 1.1.0`` + - ``pytoml 0.1.18 => 0.1.19`` + - ``requirementslib 1.1.16 => 1.1.17`` + - ``shellingham 1.2.4 => 1.2.6`` + - ``tomlkit 0.4.2 => 0.4.4`` + - ``vistir 0.1.4 => 0.1.6`` diff --git a/news/2879.bugfix b/news/2879.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2879.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2880.bugfix b/news/2880.bugfix new file mode 100644 index 0000000000..e0054d8c41 --- /dev/null +++ b/news/2880.bugfix @@ -0,0 +1,19 @@ +Updated vendored dependencies: + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) + - ``pip 10.0.1 => 18.0`` + - ``click 6.7 => 7.0`` + - ``toml 0.9.4 => 0.10.0`` + - ``pyparsing 2.2.0 => 2.2.2`` + - ``delegator 0.1.0 => 0.1.1`` + - ``attrs 18.1.0 => 18.2.0`` + - ``distlib 0.2.7 => 0.2.8`` + - ``packaging 17.1.0 => 18.0`` + - ``passa 0.2.0 => 0.3.1`` + - ``pip_shims 0.1.2 => 0.3.1`` + - ``plette 0.1.1 => 0.2.2`` + - ``pythonfinder 1.0.2 => 1.1.0`` + - ``pytoml 0.1.18 => 0.1.19`` + - ``requirementslib 1.1.16 => 1.1.17`` + - ``shellingham 1.2.4 => 1.2.6`` + - ``tomlkit 0.4.2 => 0.4.4`` + - ``vistir 0.1.4 => 0.1.6`` diff --git a/news/2894.bugfix b/news/2894.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2894.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``. diff --git a/news/2902.vendor b/news/2902.vendor new file mode 100644 index 0000000000..e0054d8c41 --- /dev/null +++ b/news/2902.vendor @@ -0,0 +1,19 @@ +Updated vendored dependencies: + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) + - ``pip 10.0.1 => 18.0`` + - ``click 6.7 => 7.0`` + - ``toml 0.9.4 => 0.10.0`` + - ``pyparsing 2.2.0 => 2.2.2`` + - ``delegator 0.1.0 => 0.1.1`` + - ``attrs 18.1.0 => 18.2.0`` + - ``distlib 0.2.7 => 0.2.8`` + - ``packaging 17.1.0 => 18.0`` + - ``passa 0.2.0 => 0.3.1`` + - ``pip_shims 0.1.2 => 0.3.1`` + - ``plette 0.1.1 => 0.2.2`` + - ``pythonfinder 1.0.2 => 1.1.0`` + - ``pytoml 0.1.18 => 0.1.19`` + - ``requirementslib 1.1.16 => 1.1.17`` + - ``shellingham 1.2.4 => 1.2.6`` + - ``tomlkit 0.4.2 => 0.4.4`` + - ``vistir 0.1.4 => 0.1.6`` diff --git a/news/2924.bugfix b/news/2924.bugfix new file mode 100644 index 0000000000..d4bb646092 --- /dev/null +++ b/news/2924.bugfix @@ -0,0 +1 @@ +Fixed various bugs related to the ``pip 18.1`` release which prevented locking, installing, syncing, and dumping to a ``requirements.txt`` file. diff --git a/news/2933.bugfix b/news/2933.bugfix new file mode 100644 index 0000000000..16224c34ec --- /dev/null +++ b/news/2933.bugfix @@ -0,0 +1 @@ +Fixed non-deterministic resolution issues related to changes to the internal package finder in ``pip 10``.
diff --git a/news/2935.vendor b/news/2935.vendor new file mode 100644 index 0000000000..e0054d8c41 --- /dev/null +++ b/news/2935.vendor @@ -0,0 +1,19 @@ +Updated vendored dependencies: + - ``pip-tools`` (updated and patched to latest w/ ``pip 18.0`` compatibility) + - ``pip 10.0.1 => 18.0`` + - ``click 6.7 => 7.0`` + - ``toml 0.9.4 => 0.10.0`` + - ``pyparsing 2.2.0 => 2.2.2`` + - ``delegator 0.1.0 => 0.1.1`` + - ``attrs 18.1.0 => 18.2.0`` + - ``distlib 0.2.7 => 0.2.8`` + - ``packaging 17.1.0 => 18.0`` + - ``passa 0.2.0 => 0.3.1`` + - ``pip_shims 0.1.2 => 0.3.1`` + - ``plette 0.1.1 => 0.2.2`` + - ``pythonfinder 1.0.2 => 1.1.0`` + - ``pytoml 0.1.18 => 0.1.19`` + - ``requirementslib 1.1.16 => 1.1.17`` + - ``shellingham 1.2.4 => 1.2.6`` + - ``tomlkit 0.4.2 => 0.4.4`` + - ``vistir 0.1.4 => 0.1.6`` diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index 76e1f8849d..163d5102a2 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -277,7 +277,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= 'isolated': False, 'wheel_cache': wheel_cache, 'use_user_site': False, - 'ignore_compatibility': True + 'ignore_compatibility': False } resolver = None preparer = None @@ -292,7 +292,10 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= # reqset.add_requirement(ireq) resolver = PipResolver(**resolver_kwargs) resolver.require_hashes = False - results = resolver._resolve_one(reqset, ireq) + try: + results = resolver._resolve_one(reqset, ireq) + except InstallationError: + pass reqset.cleanup_files() if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)): diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 152420af1a..dd009027c9 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -74,7 +74,7 @@ index 08dabe1..480ad1e 100644 else: return self.repository.find_best_match(ireq, prereleases) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py -index bf69803..eb20560 100644 +index bf69803..163d510 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -1,7 +1,7 @@ @@ -100,7 +100,7 @@ index bf69803..eb20560 100644 +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet, Specifier +from packaging.markers import Op, Value, Variable, Marker -+InstallationError = do_import(("InstallationError", "exceptions.InstallationError", "7.0", "9999")) ++InstallationError = do_import(("exceptions.InstallationError", "7.0", "9999")) +from notpip._internal.resolve import Resolver as PipResolver + @@ -221,8 +221,7 @@ index bf69803..eb20560 100644 + def gen(ireq): + if self.DEFAULT_INDEX_URL not in self.finder.index_urls: + return - -- def resolve_reqs(self, download_dir, ireq, wheel_cache): ++ + url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name) + releases = self.session.get(url).json()['releases'] + @@ -251,7 +250,8 @@ index bf69803..eb20560 100644 + try: + if ireq not in self._json_dep_cache: + self._json_dep_cache[ireq] = [g for g in gen(ireq)] -+ + +- def resolve_reqs(self, download_dir, ireq, wheel_cache): + return set(self._json_dep_cache[ireq]) + except Exception: + return set() @@ -296,13 +296,7 @@ index bf69803..eb20560 100644 else: # pip >= 10
preparer_kwargs = { -@@ -153,19 +264,20 @@ class PyPIRepository(BaseRepository): - 'download_dir': download_dir, - 'wheel_download_dir': self._wheel_download_dir, - 'progress_bar': 'off', - 'build_isolation': False - } - resolver_kwargs = { +@@ -159,13 +270,14 @@ class PyPIRepository(BaseRepository): 'finder': self.finder, 'session': self.session, 'upgrade_strategy': "to-satisfy-only", @@ -320,7 +314,7 @@ index bf69803..eb20560 100644 } resolver = None preparer = None -@@ -177,15 +289,98 @@ class PyPIRepository(BaseRepository): +@@ -177,15 +289,101 @@ class PyPIRepository(BaseRepository): resolver_kwargs['preparer'] = preparer reqset = RequirementSet() ireq.is_direct = True @@ -328,7 +322,11 @@ index bf69803..eb20560 100644 + # reqset.add_requirement(ireq) resolver = PipResolver(**resolver_kwargs) resolver.require_hashes = False - results = resolver._resolve_one(reqset, ireq) +- results = resolver._resolve_one(reqset, ireq) ++ try: ++ results = resolver._resolve_one(reqset, ireq) ++ except InstallationError: ++ pass reqset.cleanup_files() - return set(results) @@ -422,7 +420,7 @@ index bf69803..eb20560 100644 """ Given a pinned or an editable InstallRequirement, returns a set of dependencies (also InstallRequirements, but not necessarily pinned). -@@ -200,6 +395,7 @@ class PyPIRepository(BaseRepository): +@@ -200,6 +398,7 @@ class PyPIRepository(BaseRepository): # If a download_dir is passed, pip will unnecessarely # archive the entire source directory download_dir = None @@ -430,7 +428,7 @@ index bf69803..eb20560 100644 elif ireq.link and not ireq.link.is_artifact: # No download_dir for VCS sources. This also works around pip # using git-checkout-index, which gets rid of the .git dir. -@@ -214,7 +410,8 @@ class PyPIRepository(BaseRepository): +@@ -214,7 +413,8 @@ class PyPIRepository(BaseRepository): wheel_cache = WheelCache(CACHE_DIR, self.pip_options.format_control) prev_tracker = os.environ.get('PIP_REQ_TRACKER') try: @@ -440,7 +438,7 @@ index bf69803..eb20560 100644 finally: if 'PIP_REQ_TRACKER' in os.environ: if prev_tracker: -@@ -236,6 +433,10 @@ class PyPIRepository(BaseRepository): +@@ -236,6 +436,10 @@ class PyPIRepository(BaseRepository): if ireq.editable: return set() @@ -451,7 +449,7 @@ index bf69803..eb20560 100644 if not is_pinned_requirement(ireq): raise TypeError( "Expected pinned requirement, got {}".format(ireq)) -@@ -243,24 +444,22 @@ class PyPIRepository(BaseRepository): +@@ -243,24 +447,22 @@ class PyPIRepository(BaseRepository): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. 
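The two fixes in the patch above guard against hashes that cannot be computed (for example ssh VCS links) and against resolution failures raised as InstallationError. Below is a hedged sketch of the None-safe hash flow; HashCache is a simplified stand-in, not piptools' actual class, and it assumes the cache stores UTF-8 bytes.

import hashlib


class HashCache(object):
    # Simplified stand-in: the real cache is keyed by link URL.
    def __init__(self):
        self._cache = {}

    def get_hash(self, url, data=None):
        hash_value = self._cache.get(url)
        if not hash_value:
            # Unfetchable locations (e.g. ssh URLs) produce no hash at all.
            if data is not None and not url.startswith("ssh"):
                hash_value = "sha256:" + hashlib.sha256(data).hexdigest()
            hash_value = hash_value.encode("utf8") if hash_value else None
            self._cache[url] = hash_value
        # Guard the decode too, so a cached miss stays None instead of
        # raising AttributeError on NoneType.
        return hash_value.decode("utf8") if hash_value else None


cache = HashCache()
payloads = {"https://host/pkg.whl": b"wheel bytes"}  # hypothetical data
locations = ["ssh://git@host/repo.git", "https://host/pkg.whl"]
# Mirrors the patched get_hashes(): drop None results instead of crashing.
hashes = {
    h for h in (cache.get_hash(u, payloads.get(u)) for u in locations)
    if h is not None
}
print(hashes)  # only the computable hash survives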
From c3303d5c8348c4d34f82c468a17da8a1039b95fd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 05:42:57 -0400 Subject: [PATCH 26/35] Fix manifest Signed-off-by: Dan Ryan --- MANIFEST.in | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index e5cc05f2b1..a8d08c6cab 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -8,6 +8,7 @@ recursive-include pipenv *.md *.APACHE *.BSD recursive-include pipenv Makefile recursive-include pipenv/vendor vendor.txt recursive-include pipenv README +recursive-include pipenv *.json include pipenv/patched/notpip/_vendor/vendor.txt include pipenv/patched/safety.zip pipenv/patched/patched.txt include pipenv/vendor/pipreqs/stdlib pipenv/vendor/pipreqs/mapping @@ -23,7 +24,8 @@ recursive-include docs Makefile *.rst *.py *.bat recursive-include docs/_templates *.html recursive-include docs/_static *.js *.css *.png recursive-exclude docs requirements*.txt - +recursive-exclude pipenv *.pyi +recursive-exclude pipenv *.typed prune peeps prune .buildkite From 1605c00786842085b51095bf3a5c9bdb8c770752 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 7 Oct 2018 14:38:02 -0400 Subject: [PATCH 27/35] Fix environment variable encoding for python 2 Signed-off-by: Dan Ryan --- pipenv/core.py | 4 ++-- pipenv/environments.py | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 84834d809d..17827ea047 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -2037,7 +2037,7 @@ def do_shell(three=None, python=False, fancy=False, shell_args=None, pypi_mirror # Ensure that virtualenv is available. ensure_project(three=three, python=python, validate=False, pypi_mirror=pypi_mirror) # Set an environment variable, so we know we're in the environment. - os.environ["PIPENV_ACTIVE"] = "1" + os.environ["PIPENV_ACTIVE"] = fs_str("1") # Support shell compatibility mode. if PIPENV_SHELL_FANCY: fancy = True @@ -2114,7 +2114,7 @@ def inline_activate_virtual_environment(): else: _inline_activate_virtualenv() if "VIRTUAL_ENV" not in os.environ: - os.environ["VIRTUAL_ENV"] = root + os.environ["VIRTUAL_ENV"] = fs_str(root) def _launch_windows_subprocess(script): diff --git a/pipenv/environments.py b/pipenv/environments.py index 182e544278..f93f5db601 100644 --- a/pipenv/environments.py +++ b/pipenv/environments.py @@ -1,18 +1,19 @@ import os import sys from appdirs import user_cache_dir +from .utils import fs_str # HACK: avoid resolver.py uses the wrong byte code files. # I hope I can remove this one day. -os.environ["PYTHONDONTWRITEBYTECODE"] = "1" +os.environ["PYTHONDONTWRITEBYTECODE"] = fs_str("1") # HACK: Prevent invalid shebangs with Homebrew-installed Python: # https://bugs.python.org/issue22490 os.environ.pop("__PYVENV_LAUNCHER__", None) # Load patched pip instead of system pip -os.environ["PIP_SHIMS_BASE_MODULE"] = "pipenv.patched.notpip" +os.environ["PIP_SHIMS_BASE_MODULE"] = fs_str("pipenv.patched.notpip") PIPENV_CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) """Location for Pipenv to store it's package cache. 
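The fs_str() calls introduced in the patch above matter because os.environ on Python 2 only accepts native (byte) strings, while much of the surrounding code handles unicode. The following is a minimal sketch of the intent only, not vistir's exact implementation.

import os
import sys


def fs_str(string):
    # Native str (bytes on Python 2, text on Python 3) passes through.
    if isinstance(string, str):
        return string
    if sys.version_info[0] == 2:
        # Python 2 unicode is encoded with the filesystem encoding.
        return string.encode(sys.getfilesystemencoding() or "utf-8")
    return str(string)


# Safe on both interpreters, unlike assigning unicode directly on Python 2.
os.environ["PIPENV_ACTIVE"] = fs_str(u"1")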
From 1000bdc751684f7dc5c0207954b2da11d747dc1e Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 14:57:27 -0400
Subject: [PATCH 28/35] Move environment imports local so we can use fs_str in
 env declarations

Signed-off-by: Dan Ryan
---
 pipenv/utils.py | 41 +++++------------------------------------
 1 file changed, 5 insertions(+), 36 deletions(-)

diff --git a/pipenv/utils.py b/pipenv/utils.py
index 790aa42b31..dfc33cd615 100644
--- a/pipenv/utils.py
+++ b/pipenv/utils.py
@@ -34,21 +34,14 @@ def detach(self):

 from time import time

-try:
-    from urllib.parse import urlparse
-except ImportError:
-    from urlparse import urlparse
-
 from distutils.spawn import find_executable
 from contextlib import contextmanager
 from . import environments
 from .pep508checker import lookup
-from .environments import PIPENV_MAX_ROUNDS, PIPENV_CACHE_DIR, PIPENV_MAX_RETRIES

-try:
-    from collections.abc import Mapping
-except ImportError:
-    from collections import Mapping
+six.add_move(six.MovedAttribute("Mapping", "collections", "collections.abc"))
+from six.moves.urllib.parse import urlparse
+from six.moves import Mapping


 if six.PY2:
@@ -66,6 +59,7 @@ class ResourceWarning(Warning):
 def _get_requests_session():
     """Load requests lazily."""
     global requests_session
+    from .environments import PIPENV_MAX_RETRIES
     if requests_session is not None:
         return requests_session
     import requests
@@ -235,6 +229,7 @@ def actually_resolve_deps(
     from pipenv.patched.piptools.exceptions import NoCandidateFound
     from .vendor.requirementslib import Requirement
     from ._compat import TemporaryDirectory, NamedTemporaryFile
+    from .environments import PIPENV_MAX_ROUNDS, PIPENV_CACHE_DIR

     class PipCommand(basecommand.Command):
         """Needed for pip-tools."""
@@ -978,32 +973,6 @@ def download_file(url, filename):
         f.write(r.content)


-def need_update_check():
-    """Determines whether we need to check for updates."""
-    mkdir_p(PIPENV_CACHE_DIR)
-    p = os.sep.join((PIPENV_CACHE_DIR, ".pipenv_update_check"))
-    if not os.path.exists(p):
-        return True
-
-    out_of_date_time = time() - (24 * 60 * 60)
-    if os.path.isfile(p) and os.path.getmtime(p) <= out_of_date_time:
-        return True
-
-    else:
-        return False
-
-
-def touch_update_stamp():
-    """Touches PIPENV_CACHE_DIR/.pipenv_update_check"""
-    mkdir_p(PIPENV_CACHE_DIR)
-    p = os.sep.join((PIPENV_CACHE_DIR, ".pipenv_update_check"))
-    try:
-        os.utime(p, None)
-    except OSError:
-        with open(p, "w") as fh:
-            fh.write("")
-
-
 def normalize_drive(path):
     """Normalize drive in path so they stay consistent.

From 734b4e7759d5283a28ca77438aeb108470da39c4 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 15:41:15 -0400
Subject: [PATCH 29/35] Clean up redundant code and avoid circular imports

Signed-off-by: Dan Ryan
---
 pipenv/core.py         | 57 ++++++++++++++++++++++++------------
 pipenv/environments.py |  2 +-
 pipenv/project.py      | 57 +++++++++++++++++++++++-------------------
 pipenv/shells.py       |  4 +--
 pipenv/utils.py        | 30 ++++++----------------
 5 files changed, 77 insertions(+), 73 deletions(-)

diff --git a/pipenv/core.py b/pipenv/core.py
index 17827ea047..3ef6abd36d 100644
--- a/pipenv/core.py
+++ b/pipenv/core.py
@@ -14,6 +14,7 @@
 import delegator
 import pipfile
 from blindspin import spinner
+import vistir
 import six

 from .cmdparse import Script
@@ -37,11 +38,9 @@
     is_pinned,
     is_star,
     rmtree,
-    fs_str,
     clean_resolved_dep,
-    parse_indexes
+    parse_indexes,
 )
-from ._compat import TemporaryDirectory, Path
 from . import environments, pep508checker, progress
 from .environments import (
     PIPENV_COLORBLIND,
@@ -68,9 +67,7 @@
     "wheel",
 )

-FIRST_PACKAGES = (
-    "cython",
-)
+FIRST_PACKAGES = ("cython",)
 # Are we using the default Python?
 USING_DEFAULT_PYTHON = True
 if not PIPENV_HIDE_EMOJIS:
@@ -886,7 +883,7 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None):
         cmd.append("--system-site-packages")

     if pypi_mirror:
-        pip_config = {"PIP_INDEX_URL": fs_str(pypi_mirror)}
+        pip_config = {"PIP_INDEX_URL": vistir.misc.fs_str(pypi_mirror)}
     else:
         pip_config = {}

@@ -909,7 +906,7 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None):
     # This mimics Pew's "setproject".
     project_file_name = os.path.join(project.virtualenv_location, ".project")
     with open(project_file_name, "w") as f:
-        f.write(fs_str(project.project_directory))
+        f.write(vistir.misc.fs_str(project.project_directory))

     # Say where the virtualenv is.
     do_where(virtualenv=True, bare=False)
@@ -1196,7 +1193,9 @@ def do_init(
         ensure_pipfile(system=system)
     if not requirements_dir:
         cleanup_reqdir = True
-        requirements_dir = TemporaryDirectory(suffix="-requirements", prefix="pipenv-")
+        requirements_dir = vistir.compat.TemporaryDirectory(
+            suffix="-requirements", prefix="pipenv-"
+        )
     # Write out the lockfile if it doesn't exist, but not if the Pipfile is being ignored
     if (project.lockfile_exists and not ignore_pipfile) and not skip_lock:
         old_hash = project.get_lockfile_hash()
@@ -1338,7 +1337,7 @@ def pip_install(
             sources = [{"url": index}]
         if extra_indexes:
             if isinstance(extra_indexes, six.string_types):
-                extra_indexes = [extra_indexes,]
+                extra_indexes = [extra_indexes]
             for idx in extra_indexes:
                 try:
                     extra_src = project.find_source(idx).get("url")
@@ -1389,27 +1388,33 @@ def pip_install(
     if environments.is_verbose():
         click.echo("$ {0}".format(pip_command), err=True)
-    cache_dir = Path(PIPENV_CACHE_DIR)
+    cache_dir = vistir.compat.Path(PIPENV_CACHE_DIR)
     pip_config = {
-        "PIP_CACHE_DIR": fs_str(cache_dir.as_posix()),
-        "PIP_WHEEL_DIR": fs_str(cache_dir.joinpath("wheels").as_posix()),
-        "PIP_DESTINATION_DIR": fs_str(cache_dir.joinpath("pkgs").as_posix()),
-        "PIP_EXISTS_ACTION": fs_str("w"),
-        "PATH": fs_str(os.environ.get("PATH")),
+        "PIP_CACHE_DIR": vistir.misc.fs_str(cache_dir.as_posix()),
+        "PIP_WHEEL_DIR": vistir.misc.fs_str(cache_dir.joinpath("wheels").as_posix()),
+        "PIP_DESTINATION_DIR": vistir.misc.fs_str(
+            cache_dir.joinpath("pkgs").as_posix()
+        ),
+        "PIP_EXISTS_ACTION": vistir.misc.fs_str("w"),
+        "PATH": vistir.misc.fs_str(os.environ.get("PATH")),
     }
     if src:
-        pip_config.update({"PIP_SRC": fs_str(project.virtualenv_src_location)})
+        pip_config.update(
+            {"PIP_SRC": vistir.misc.fs_str(project.virtualenv_src_location)}
+        )
     pip_command = Script.parse(pip_command).cmdify()
     c = delegator.run(pip_command, block=block, env=pip_config)
     return c


 def pip_download(package_name):
-    cache_dir = Path(PIPENV_CACHE_DIR)
+    cache_dir = vistir.compat.Path(PIPENV_CACHE_DIR)
     pip_config = {
-        "PIP_CACHE_DIR": fs_str(cache_dir.as_posix()),
-        "PIP_WHEEL_DIR": fs_str(cache_dir.joinpath("wheels").as_posix()),
-        "PIP_DESTINATION_DIR": fs_str(cache_dir.joinpath("pkgs").as_posix()),
+        "PIP_CACHE_DIR": vistir.misc.fs_str(cache_dir.as_posix()),
+        "PIP_WHEEL_DIR": vistir.misc.fs_str(cache_dir.joinpath("wheels").as_posix()),
+        "PIP_DESTINATION_DIR": vistir.misc.fs_str(
+            cache_dir.joinpath("pkgs").as_posix()
+        ),
     }
     for source in project.sources:
         cmd = '{0} download "{1}" -i {2} -d {3}'.format(
@@ -1666,7 +1671,7 @@ def do_install(
     from .environments import PIPENV_VIRTUALENV, PIPENV_USE_SYSTEM
     from notpip._internal.exceptions import PipError

-    requirements_directory = TemporaryDirectory(
+    requirements_directory = vistir.compat.TemporaryDirectory(
         suffix="-requirements", prefix="pipenv-"
     )
     if selective_upgrade:
@@ -2037,7 +2042,7 @@ def do_shell(three=None, python=False, fancy=False, shell_args=None, pypi_mirror
     # Ensure that virtualenv is available.
     ensure_project(three=three, python=python, validate=False, pypi_mirror=pypi_mirror)
     # Set an environment variable, so we know we're in the environment.
-    os.environ["PIPENV_ACTIVE"] = fs_str("1")
+    os.environ["PIPENV_ACTIVE"] = vistir.misc.fs_str("1")
     # Support shell compatibility mode.
     if PIPENV_SHELL_FANCY:
         fancy = True
@@ -2114,7 +2119,7 @@ def inline_activate_virtual_environment():
     else:
         _inline_activate_virtualenv()
     if "VIRTUAL_ENV" not in os.environ:
-        os.environ["VIRTUAL_ENV"] = fs_str(root)
+        os.environ["VIRTUAL_ENV"] = vistir.misc.fs_str(root)


 def _launch_windows_subprocess(script):
@@ -2471,7 +2476,9 @@ def do_sync(
     )

     # Install everything.
-    requirements_dir = TemporaryDirectory(suffix="-requirements", prefix="pipenv-")
+    requirements_dir = vistir.compat.TemporaryDirectory(
+        suffix="-requirements", prefix="pipenv-"
+    )
     do_init(
         dev=dev,
         concurrent=(not sequential),
diff --git a/pipenv/environments.py b/pipenv/environments.py
index f93f5db601..ca05535f33 100644
--- a/pipenv/environments.py
+++ b/pipenv/environments.py
@@ -1,7 +1,7 @@
 import os
 import sys
 from appdirs import user_cache_dir
-from .utils import fs_str
+from .vendor.vistir.misc import fs_str


 # HACK: avoid resolver.py uses the wrong byte code files.
diff --git a/pipenv/project.py b/pipenv/project.py
index b02e94d8c6..0722619f1d 100644
--- a/pipenv/project.py
+++ b/pipenv/project.py
@@ -12,14 +12,11 @@
 import pipfile
 import pipfile.api
 import six
+import vistir
 import toml

-from ._compat import Path
-
 from .cmdparse import Script
 from .utils import (
-    atomic_open_for_write,
-    mkdir_p,
     pep423_name,
     proper_case,
     find_requirements,
@@ -33,7 +30,7 @@
     is_star,
     get_workon_home,
     is_virtual_environment,
-    looks_like_dir
+    looks_like_dir,
 )
 from .environments import (
     PIPENV_MAX_DEPTH,
@@ -50,7 +47,7 @@ def _normalized(p):
     if p is None:
         return None
-    loc = Path(p)
+    loc = vistir.compat.Path(p)
     if loc.is_absolute():
         return normalize_drive(str(loc))
     else:
@@ -73,13 +70,15 @@ class _LockFileEncoder(json.JSONEncoder):
     * PrettyTOML's container elements are seamlessly encodable.
     * The output is always UTF-8-encoded text, never binary, even on Python 2.
     """
+
     def __init__(self):
         super(_LockFileEncoder, self).__init__(
-            indent=4, separators=(",", ": "), sort_keys=True,
+            indent=4, separators=(",", ": "), sort_keys=True
         )

     def default(self, obj):
         from prettytoml.elements.common import ContainerElement, TokenElement
+
         if isinstance(obj, (ContainerElement, TokenElement)):
             return obj.primitive_value
         return super(_LockFileEncoder, self).default(obj)
@@ -277,19 +276,26 @@ def get_location_for_virtualenv(self):
                 name = f.read().strip()
         # Assume file's contents is a path if it contains slashes.
         if looks_like_dir(name):
-            return Path(name).absolute().as_posix()
+            return vistir.compat.Path(name).absolute().as_posix()
         return str(get_workon_home().joinpath(name))

     def get_installed_packages(self):
         from . import PIPENV_ROOT, PIPENV_VENDOR, PIPENV_PATCHED
         from .utils import temp_path, load_path, temp_environ
+
         if self.virtualenv_exists:
             with temp_path(), temp_environ():
                 new_path = load_path(self.which("python"))
-                new_path = [new_path[0], PIPENV_ROOT, PIPENV_PATCHED, PIPENV_VENDOR] + new_path[1:]
+                new_path = [
+                    new_path[0],
+                    PIPENV_ROOT,
+                    PIPENV_PATCHED,
+                    PIPENV_VENDOR,
+                ] + new_path[1:]
                 sys.path = new_path
-                os.environ['VIRTUAL_ENV'] = self.virtualenv_location
+                os.environ["VIRTUAL_ENV"] = self.virtualenv_location
                 from .vendor.pip_shims.shims import get_installed_distributions
+
                 return get_installed_distributions(local_only=True)
         else:
             return []
@@ -330,7 +336,7 @@ def get_name(name, location):
             # In-project venv
             # "Proper" path casing (on non-case-sensitive filesystems).
             if (
-                fnmatch.fnmatch('A', 'a')
+                fnmatch.fnmatch("A", "a")
                 or self.is_venv_in_project()
                 or get_workon_home().joinpath(venv_name).exists()
             ):
@@ -365,7 +371,7 @@ def virtualenv_location(self):
         if PIPENV_VIRTUALENV:
             return PIPENV_VIRTUALENV

-        if not self._virtualenv_location: # Use cached version, if available.
+        if not self._virtualenv_location:  # Use cached version, if available.
             assert self.project_directory, "project not created"
             self._virtualenv_location = self.get_location_for_virtualenv()
         return self._virtualenv_location
@@ -376,7 +382,7 @@ def virtualenv_src_location(self):
             loc = os.sep.join([self.virtualenv_location, "src"])
         else:
             loc = os.sep.join([self.project_directory, "src"])
-        mkdir_p(loc)
+        vistir.path.mkdir_p(loc)
         return loc

     @property
@@ -385,13 +391,13 @@ def download_location(self):
             loc = os.sep.join([self.virtualenv_location, "downloads"])
             self._download_location = loc
         # Create the directory, if it doesn't exist.
-        mkdir_p(self._download_location)
+        vistir.path.mkdir_p(self._download_location)
         return self._download_location

     @property
     def proper_names_db_path(self):
         if self._proper_names_db_path is None:
-            self._proper_names_db_path = Path(
+            self._proper_names_db_path = vistir.compat.Path(
                 self.virtualenv_location, "pipenv-proper-names.txt"
             )
         self._proper_names_db_path.touch()  # Ensure the file exists.
@@ -671,7 +677,10 @@ def write_toml(self, data, path=None):
                     data[section][package].update(_data)
         formatted_data = toml.dumps(data).rstrip()

-        if Path(path).absolute() == Path(self.pipfile_location).absolute():
+        if (
+            vistir.compat.Path(path).absolute()
+            == vistir.compat.Path(self.pipfile_location).absolute()
+        ):
             newlines = self._pipfile_newlines
         else:
             newlines = DEFAULT_NEWLINES
@@ -685,11 +694,10 @@ def write_lockfile(self, content):
         """Write out the lockfile.
         """
         s = self._lockfile_encoder.encode(content)
-        open_kwargs = {
-            'newline': self._lockfile_newlines,
-            'encoding': 'utf-8',
-        }
-        with atomic_open_for_write(self.lockfile_location, **open_kwargs) as f:
+        open_kwargs = {"newline": self._lockfile_newlines, "encoding": "utf-8"}
+        with vistir.contextmanagers.atomic_open_for_write(
+            self.lockfile_location, **open_kwargs
+        ) as f:
             f.write(s)
             # Write newline at end of document. GH-319.
             # Only need '\n' here; the file object handles the rest.
@@ -802,7 +810,9 @@ def add_index_to_pipfile(self, index, verify_ssl=True):
             source = {"url": index, "verify_ssl": verify_ssl}
         else:
             return
-        name, _, tld_guess = six.moves.urllib.parse.urlsplit(index).netloc.rpartition(".")
+        name, _, tld_guess = six.moves.urllib.parse.urlsplit(index).netloc.rpartition(
+            "."
+        )
         src_name = name.replace(".", "")
         try:
             self.get_source(name=src_name)
@@ -810,6 +820,7 @@ def add_index_to_pipfile(self, index, verify_ssl=True):
             source[name] = src_name
         else:
             from random import randint
+
             source[name] = "{0}-{1}".format(src_name, randint(1, 1000))
         # Add the package to the group.
         if "source" not in p:
@@ -824,7 +835,7 @@ def recase_pipfile(self):
         self.write_toml(self.parsed_pipfile)

     def load_lockfile(self, expand_env_vars=True):
-        with io.open(self.lockfile_location, encoding='utf-8') as lock:
+        with io.open(self.lockfile_location, encoding="utf-8") as lock:
             j = json.load(lock)
             self._lockfile_newlines = preferred_newlines(lock)
         # lockfile is just a string
diff --git a/pipenv/shells.py b/pipenv/shells.py
index 70b236ea07..352c01f409 100644
--- a/pipenv/shells.py
+++ b/pipenv/shells.py
@@ -5,9 +5,9 @@
 import subprocess
 import sys

-from ._compat import get_terminal_size, Path
 from .environments import PIPENV_SHELL_EXPLICIT, PIPENV_SHELL, PIPENV_EMULATOR
-from .utils import temp_environ
+from .vendor.vistir.compat import get_terminal_size, Path
+from .vendor.vistir.contextmanagers import temp_environ
 from .vendor import shellingham

diff --git a/pipenv/utils.py b/pipenv/utils.py
index dfc33cd615..8aeb0cff06 100644
--- a/pipenv/utils.py
+++ b/pipenv/utils.py
@@ -32,8 +32,6 @@ def detach(self):

 logging.basicConfig(level=logging.ERROR)

-from time import time
-
 from distutils.spawn import find_executable
 from contextlib import contextmanager
 from . import environments
@@ -59,13 +57,14 @@ class ResourceWarning(Warning):
 def _get_requests_session():
     """Load requests lazily."""
     global requests_session
-    from .environments import PIPENV_MAX_RETRIES
     if requests_session is not None:
         return requests_session
     import requests

     requests_session = requests.Session()
-    adapter = requests.adapters.HTTPAdapter(max_retries=PIPENV_MAX_RETRIES)
+    adapter = requests.adapters.HTTPAdapter(
+        max_retries=environments.PIPENV_MAX_RETRIES
+    )
     requests_session.mount("https://pypi.org/pypi", adapter)
     return requests_session

@@ -229,7 +228,6 @@ def actually_resolve_deps(
     from pipenv.patched.piptools.exceptions import NoCandidateFound
     from .vendor.requirementslib import Requirement
     from ._compat import TemporaryDirectory, NamedTemporaryFile
-    from .environments import PIPENV_MAX_ROUNDS, PIPENV_CACHE_DIR

     class PipCommand(basecommand.Command):
         """Needed for pip-tools."""
@@ -283,7 +281,7 @@ class PipCommand(basecommand.Command):
         f.write(u"\n".join([_constraint for _constraint in constraints]))
         constraints_file = f.name
     pip_options, _ = pip_command.parser.parse_args(pip_args)
-    pip_options.cache_dir = PIPENV_CACHE_DIR
+    pip_options.cache_dir = environments.PIPENV_CACHE_DIR
     session = pip_command._build_session(pip_options)
     pypi = PyPIRepository(pip_options=pip_options, use_json=False, session=session)
     constraints = parse_requirements(
@@ -300,7 +298,7 @@ class PipCommand(basecommand.Command):
     # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
     hashes = None
     try:
-        results = resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS)
+        results = resolver.resolve(max_rounds=environments.PIPENV_MAX_ROUNDS)
         hashes = resolver.resolve_hashes(results)
         resolved_tree.update(results)
     except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
@@ -344,6 +342,7 @@ def venv_resolve_deps(
     allow_global=False,
     pypi_mirror=None,
 ):
+    from .vendor.vistir.misc import fs_str
     from .vendor import delegator
     from . import resolver
     import json
@@ -1229,20 +1228,6 @@ def clean_resolved_dep(dep, is_top_level=False, pipfile_entry=None):
     return {name: lockfile}


-def fs_str(string):
-    """Encodes a string into the proper filesystem encoding
-
-    Borrowed from pip-tools
-    """
-    if isinstance(string, str):
-        return string
-    assert not isinstance(string, bytes)
-    return string.encode(_fs_encoding)
-
-
-_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
-
-
 def get_workon_home():
     from ._compat import Path
@@ -1282,7 +1267,8 @@ def is_virtual_environment(path):

 @contextmanager
 def locked_repository(requirement):
-    from pipenv.vendor.vistir.path import create_tracked_tempdir
+    from .vendor.vistir.path import create_tracked_tempdir
+    from .vendor.vistir.misc import fs_str
     src_dir = create_tracked_tempdir(prefix="pipenv-src")
     if not requirement.is_vcs:
         return

From 2590c4caab64509866a29259039ae3994911599f Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 16:05:27 -0400
Subject: [PATCH 30/35] Expose fs_str in `utils.py`

Signed-off-by: Dan Ryan
---
 pipenv/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pipenv/utils.py b/pipenv/utils.py
index 8aeb0cff06..cb5748013f 100644
--- a/pipenv/utils.py
+++ b/pipenv/utils.py
@@ -14,6 +14,7 @@
 from click import echo as click_echo
 from first import first
+from vistir.misc import fs_str

 try:
     from weakref import finalize

From 3a303ee3c8d0d30c0e081ff4ea8c8b06c0bcbe1d Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 16:54:25 -0400
Subject: [PATCH 31/35] Update piptools patches

Signed-off-by: Dan Ryan
---
 pipenv/patched/patched.txt                   |  2 +-
 .../vendoring/patches/patched/piptools.patch | 57 ++++++++++++------
 2 files changed, 38 insertions(+), 21 deletions(-)

diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt
index d12555deaf..3ad4c1e934 100644
--- a/pipenv/patched/patched.txt
+++ b/pipenv/patched/patched.txt
@@ -2,6 +2,6 @@ safety
 git+https://github.com/jumpscale7/python-consistent-toml.git#egg=contoml
 crayons==0.1.2
 pipfile==0.0.2
-git+https://github.com/jazzband/pip-tools.git@19a3b1f11d941b01209bb4fad4a2a16d15f67171#egg=piptools
+pip-tools==3.1.0
 prettytoml==0.3
 pip==18.0
diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch
index dd009027c9..388d8c28ba 100644
--- a/tasks/vendoring/patches/patched/piptools.patch
+++ b/tasks/vendoring/patches/patched/piptools.patch
@@ -1,22 +1,28 @@
 diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py
-index 674674a..feadad8 100644
+index 1fa3805..c0ecec8 100644
 --- a/pipenv/patched/piptools/_compat/__init__.py
 +++ b/pipenv/patched/piptools/_compat/__init__.py
-@@ -27,4 +27,6 @@ from .pip_compat import (
+@@ -27,6 +27,8 @@ from .pip_compat import (
      cmdoptions,
      get_installed_distributions,
      PyPI,
 +    SafeFileCache,
 +    InstallationError,
++    install_req_from_line,
++    install_req_from_editable,
  )
 diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py
-index 1ba2cc7..3ea0826 100644
+index 28da51f..de9b435 100644
 --- a/pipenv/patched/piptools/_compat/pip_compat.py
 +++ b/pipenv/patched/piptools/_compat/pip_compat.py
-@@ -1,9 +1,10 @@
+@@ -1,12 +1,13 @@
  # -*- coding=utf-8 -*-
  import importlib
+-import pip
++from pip_shims import pip_version
+ import pkg_resources
+
 -def do_import(module_path, subimport=None, old_path=None):
 +def do_import(module_path, subimport=None, old_path=None, vendored_name=None):
      old_path = old_path or module_path
@@ -26,7 +32,7 @@ index 1ba2cc7..3ea0826 100644
      paths = [module_path, old_path]
      search_order = ["{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None]
      package = subimport if subimport else None
-@@ -18,17 +19,19 @@ def do_import(module_path, subimport=None, old_path=None):
+@@ -21,25 +22,28 @@ def do_import(module_path, subimport=None, old_path=None):
      return getattr(imported, package)
@@ -60,6 +66,17 @@ index 1ba2cc7..3ea0826 100644
 +PyPI = do_import('models.index', 'PyPI', vendored_name='notpip')
 +SafeFileCache = do_import('download', 'SafeFileCache', vendored_name='notpip')
 +InstallationError = do_import('exceptions', 'InstallationError', vendored_name='notpip')
+
+ # pip 18.1 has refactored InstallRequirement constructors use by pip-tools.
+-if pkg_resources.parse_version(pip.__version__) < pkg_resources.parse_version('18.1'):
++if pkg_resources.parse_version(pip_version) < pkg_resources.parse_version('18.1'):
+     install_req_from_line = InstallRequirement.from_line
+     install_req_from_editable = InstallRequirement.from_editable
+ else:
+-    install_req_from_line = do_import('req.constructors', 'install_req_from_line')
+-    install_req_from_editable = do_import('req.constructors', 'install_req_from_editable')
++    install_req_from_line = do_import('req.constructors', 'install_req_from_line', vendored_name="notpip")
++    install_req_from_editable = do_import('req.constructors', 'install_req_from_editable', vendored_name="notpip")
 diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py
 index 08dabe1..480ad1e 100644
 --- a/pipenv/patched/piptools/repositories/local.py
@@ -95,13 +112,13 @@ index bf69803..163d510 100644
 +    InstallRequirement,
 +    SafeFileCache
  )
 +os.environ["PIP_SHIMS_BASE_MODULE"] = "pip"
 +from pip_shims.shims import do_import, VcsSupport, WheelCache
 +from packaging.requirements import Requirement
 +from packaging.specifiers import SpecifierSet, Specifier
 +from packaging.markers import Op, Value, Variable, Marker
 +InstallationError = do_import(("exceptions.InstallationError", "7.0", "9999"))
 +from notpip._internal.resolve import Resolver as PipResolver
 +
 -from ..cache import CACHE_DIR
@@ -115,7 +132,7 @@ index bf69803..163d510 100644
  try:
 -    from pip._internal.req.req_tracker import RequirementTracker
 +    from notpip._internal.req.req_tracker import RequirementTracker
  except ImportError:
      @contextmanager
      def RequirementTracker():
@@ -222,7 +239,7 @@ index bf69803..163d510 100644
 +        if self.DEFAULT_INDEX_URL not in self.finder.index_urls:
 +            return
 +
-+        url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name)
++        url = 'https:/pypi.org/pypi/{0}/json'.format(ireq.req.name)
 +        releases = self.session.get(url).json()['releases']
 +
 +        matches = [
@@ -233,7 +250,7 @@ index bf69803..163d510 100644
 +            return
 +
 +        release_requires = self.session.get(
-+            'https://pypi.org/pypi/{0}/{1}/json'.format(
++            'https:/pypi.org/pypi/{0}/{1}/json'.format(
 +                ireq.req.name, matches[0],
 +            ),
 +        ).json()
@@ -277,7 +294,7 @@ index bf69803..163d510 100644
  try:
 -    from pip._internal.operations.prepare import RequirementPreparer
 -    from pip._internal.resolve import Resolver as PipResolver
 +    from notpip._internal.operations.prepare import RequirementPreparer
  except ImportError:
-     # Pip 9 and below
+     # Pip 9 and below
@@ -556,7 +573,7 @@ index c2d323c..d5a471d 100644
      # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
      dependency_strings = self.dependency_cache[ireq]
 diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py
-index a164334..6225d7e 100644
+index 2360a04..6f62eb9 100644
 --- a/pipenv/patched/piptools/utils.py
 +++ b/pipenv/patched/piptools/utils.py
 @@ -4,6 +4,7 @@ from __future__ import (absolute_import, division, print_function,
                          unicode_literals)

 +import os
  from collections import OrderedDict
  from contextlib import contextmanager
 @@ -11,11 +12,78 @@ from contextlib import contextmanager
  from ._compat import install_req_from_line
  from .click import style
 +from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier
 +from pipenv.patched.notpip._vendor.packaging.version import Version, InvalidVersion, parse as parse_version
 +from pipenv.patched.notpip._vendor.packaging.markers import Marker, Op, Value, Variable

  UNSAFE_PACKAGES = {'setuptools', 'distribute', 'pip'}
@@ -616,7 +633,7 @@ index a164334..6225d7e 100644
 +    marker_str = ' and '.join(list(dedup(tuple(marker_list,)))) if marker_list else ''
 +    new_markers = Marker(marker_str)
 +    ireq.markers = new_markers
-+    new_ireq = InstallRequirement.from_line(format_requirement(ireq))
++    new_ireq = install_req_from_line(format_requirement(ireq))
 +    if ireq.constraint:
 +        new_ireq.constraint = ireq.constraint
 +    return new_ireq
@@ -658,15 +675,15 @@ index a164334..6225d7e 100644
      # Sort extras for stability
      extras_string = "[{}]".format(",".join(sorted(extras)))

--    return InstallRequirement.from_line(
+-    return install_req_from_line(
 -        str('{}{}=={}'.format(name, extras_string, version)),
 -        constraint=constraint)
 +    if not markers:
-+        return InstallRequirement.from_line(
++        return install_req_from_line(
 +            str('{}{}=={}'.format(name, extras_string, version)),
 +            constraint=constraint)
 +    else:
-+        return InstallRequirement.from_line(
++        return install_req_from_line(
 +            str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))),
 +            constraint=constraint)

From b58f902a190fc714ad21de7d6a4b3ff65c552e8d Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 17:01:32 -0400
Subject: [PATCH 32/35] Fix pipdeptree patch

Signed-off-by: Dan Ryan
---
 .../patches/vendor/pipdeptree-local-pip.patch | 18 ------------------
 .../vendor/pipdeptree-updated-pip18.patch     |  2 +-
 2 files changed, 1 insertion(+), 19 deletions(-)
 delete mode 100644 tasks/vendoring/patches/vendor/pipdeptree-local-pip.patch

diff --git a/tasks/vendoring/patches/vendor/pipdeptree-local-pip.patch b/tasks/vendoring/patches/vendor/pipdeptree-local-pip.patch
deleted file mode 100644
index a3210177f4..0000000000
--- a/tasks/vendoring/patches/vendor/pipdeptree-local-pip.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py
-index a62badf7..a2ea83fd 100644
---- a/pipenv/vendor/pipdeptree.py
-+++ b/pipenv/vendor/pipdeptree.py
-@@ -14,10 +14,10 @@ except ImportError:
-     from ordereddict import OrderedDict
-
- try:
--    from pipenv.patched.notpip._internal import get_installed_distributions
--    from pipenv.patched.notpip._internal.operations.freeze import FrozenRequirement
-+    from pip._internal import get_installed_distributions
-+    from pip._internal.operations.freeze import FrozenRequirement
- except ImportError:
--    from pipenv.patched.notpip import get_installed_distributions, FrozenRequirement
-+    from pip import get_installed_distributions, FrozenRequirement
-
- import pkg_resources
- # inline:
diff --git a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch b/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch
index 1e447c6122..20e04f3493 100644
--- a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch
+++ b/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch
@@ -1,5 +1,5 @@
 diff --git a/pipenv/vendor/pipdeptree.py b/pipenv/vendor/pipdeptree.py
-index 7820aa5d..9cce0325 100644
+index 7820aa5..9cce032 100644
 --- a/pipenv/vendor/pipdeptree.py
 +++ b/pipenv/vendor/pipdeptree.py
 @@ -13,11 +13,7 @@ try:

From d289e46467114f1533d1db0db378398ed346eab7 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 17:03:59 -0400
Subject: [PATCH 33/35] Fix pipdeptree patch again

Signed-off-by: Dan Ryan
---
 .../vendoring/patches/vendor/pipdeptree-updated-pip18.patch | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch b/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch
index 20e04f3493..e3ff9bbf29 100644
--- a/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch
+++ b/tasks/vendoring/patches/vendor/pipdeptree-updated-pip18.patch
@@ -7,10 +7,10 @@ index 7820aa5..9cce032 100644
      from ordereddict import OrderedDict

 -try:
--    from pip._internal import get_installed_distributions
--    from pip._internal.operations.freeze import FrozenRequirement
+-    from pipenv.patched.notpip._internal import get_installed_distributions
+-    from pipenv.patched.notpip._internal.operations.freeze import FrozenRequirement
 -except ImportError:
--    from pip import get_installed_distributions, FrozenRequirement
+-    from pipenv.patched.notpip import get_installed_distributions, FrozenRequirement
 +from pipenv.vendor.pip_shims import get_installed_distributions, FrozenRequirement

  import pkg_resources

From 16fb2d1012aeddf2dd138a15824b69671850641d Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 17:57:53 -0400
Subject: [PATCH 34/35] Update piptools and fix patch

Signed-off-by: Dan Ryan
---
 pipenv/patched/piptools/_compat/__init__.py    |  2 ++
 pipenv/patched/piptools/_compat/pip_compat.py  | 12 ++++++++++++
 pipenv/patched/piptools/repositories/pypi.py   |  6 +++---
 pipenv/patched/piptools/resolver.py            |  4 ++--
 pipenv/patched/piptools/scripts/compile.py     |  4 ++--
 pipenv/patched/piptools/utils.py               |  8 ++++----
 tasks/vendoring/patches/patched/piptools.patch |  7 ++++---
 7 files changed, 29 insertions(+), 14 deletions(-)

diff --git a/pipenv/patched/piptools/_compat/__init__.py b/pipenv/patched/piptools/_compat/__init__.py
index feadad892a..c0ecec8a3f 100644
--- a/pipenv/patched/piptools/_compat/__init__.py
+++ b/pipenv/patched/piptools/_compat/__init__.py
@@ -29,4 +29,6 @@
     PyPI,
     SafeFileCache,
     InstallationError,
+    install_req_from_line,
+    install_req_from_editable,
 )
diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py
index 3ea0826708..de9b4353d4 100644
--- a/pipenv/patched/piptools/_compat/pip_compat.py
+++ b/pipenv/patched/piptools/_compat/pip_compat.py
@@ -1,6 +1,9 @@
 # -*- coding=utf-8 -*-
 import importlib

+from pip_shims import pip_version
+import pkg_resources
+
 def do_import(module_path, subimport=None, old_path=None, vendored_name=None):
     old_path = old_path or module_path
     prefix = vendored_name if vendored_name else "pip"
@@ -35,3 +38,12 @@ def do_import(module_path, subimport=None, old_path=None, vendored_name=None):
 PyPI = do_import('models.index', 'PyPI', vendored_name='notpip')
 SafeFileCache = do_import('download', 'SafeFileCache', vendored_name='notpip')
 InstallationError = do_import('exceptions', 'InstallationError', vendored_name='notpip')
+
+# pip 18.1 has refactored InstallRequirement constructors use by pip-tools.
+if pkg_resources.parse_version(pip_version) < pkg_resources.parse_version('18.1'):
+    install_req_from_line = InstallRequirement.from_line
+    install_req_from_editable = InstallRequirement.from_editable
+else:
+    install_req_from_line = do_import('req.constructors', 'install_req_from_line', vendored_name="notpip")
+    install_req_from_editable = do_import('req.constructors', 'install_req_from_editable', vendored_name="notpip")
+
diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py
index 163d5102a2..3061ad34ad 100644
--- a/pipenv/patched/piptools/repositories/pypi.py
+++ b/pipenv/patched/piptools/repositories/pypi.py
@@ -25,7 +25,7 @@
 from packaging.specifiers import SpecifierSet, Specifier
 from packaging.markers import Op, Value, Variable, Marker
 InstallationError = do_import(("exceptions.InstallationError", "7.0", "9999"))
-from notpip._internal.resolve import Resolver as PipResolver
+from pipenv.patched.notpip._internal.resolve import Resolver as PipResolver

 from pipenv.environments import PIPENV_CACHE_DIR as CACHE_DIR
@@ -35,7 +35,7 @@
 from .base import BaseRepository

 try:
-    from notpip._internal.req.req_tracker import RequirementTracker
+    from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker
 except ImportError:
     @contextmanager
     def RequirementTracker():
@@ -242,7 +242,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist=
         setup_requires = {}
         dist = None
         try:
-            from notpip._internal.operations.prepare import RequirementPreparer
+            from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer
         except ImportError:
             # Pip 9 and below
             reqset = RequirementSet(
diff --git a/pipenv/patched/piptools/resolver.py b/pipenv/patched/piptools/resolver.py
index d5a471d443..1f3e18c1d6 100644
--- a/pipenv/patched/piptools/resolver.py
+++ b/pipenv/patched/piptools/resolver.py
@@ -7,7 +7,7 @@
 from itertools import chain, count
 import os

-from ._compat import InstallRequirement
+from ._compat import install_req_from_line

 from . import click
 from .cache import DependencyCache
@@ -306,7 +306,7 @@ def _iter_dependencies(self, ireq):
         log.debug('  {:25} requires {}'.format(format_requirement(ireq), ', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
         for dependency_string in dependency_strings:
-            yield InstallRequirement.from_line(dependency_string, constraint=ireq.constraint)
+            yield install_req_from_line(dependency_string, constraint=ireq.constraint)

     def reverse_dependencies(self, ireqs):
         non_editable = [ireq for ireq in ireqs if not ireq.editable]
diff --git a/pipenv/patched/piptools/scripts/compile.py b/pipenv/patched/piptools/scripts/compile.py
index 484b710cdc..4625618ada 100644
--- a/pipenv/patched/piptools/scripts/compile.py
+++ b/pipenv/patched/piptools/scripts/compile.py
@@ -8,7 +8,7 @@
 import tempfile

 from .._compat import (
-    InstallRequirement,
+    install_req_from_line,
     parse_requirements,
     cmdoptions,
     Command,
@@ -134,7 +134,7 @@ def cli(verbose, dry_run, pre, rebuild, find_links, index_url, extra_index_url,
     if not upgrade and os.path.exists(dst_file):
         ireqs = parse_requirements(dst_file, finder=repository.finder, session=repository.session, options=pip_options)
         # Exclude packages from --upgrade-package/-P from the existing pins: We want to upgrade.
-        upgrade_pkgs_key = {key_from_req(InstallRequirement.from_line(pkg).req) for pkg in upgrade_packages}
+        upgrade_pkgs_key = {key_from_req(install_req_from_line(pkg).req) for pkg in upgrade_packages}
         existing_pins = {key_from_req(ireq.req): ireq for ireq in ireqs if is_pinned_requirement(ireq) and key_from_req(ireq.req) not in upgrade_pkgs_key}
diff --git a/pipenv/patched/piptools/utils.py b/pipenv/patched/piptools/utils.py
index a7259937fc..6f62eb9d59 100644
--- a/pipenv/patched/piptools/utils.py
+++ b/pipenv/patched/piptools/utils.py
@@ -9,7 +9,7 @@
 from collections import OrderedDict
 from contextlib import contextmanager

-from ._compat import InstallRequirement
+from ._compat import install_req_from_line

 from .click import style
 from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier
@@ -57,7 +57,7 @@ def simplify_markers(ireq):
     marker_str = ' and '.join(list(dedup(tuple(marker_list,)))) if marker_list else ''
     new_markers = Marker(marker_str)
     ireq.markers = new_markers
-    new_ireq = InstallRequirement.from_line(format_requirement(ireq))
+    new_ireq = install_req_from_line(format_requirement(ireq))
     if ireq.constraint:
         new_ireq.constraint = ireq.constraint
     return new_ireq
@@ -117,11 +117,11 @@ def make_install_requirement(name, version, extras, markers, constraint=False):
     extras_string = "[{}]".format(",".join(sorted(extras)))

     if not markers:
-        return InstallRequirement.from_line(
+        return install_req_from_line(
             str('{}{}=={}'.format(name, extras_string, version)),
             constraint=constraint)
     else:
-        return InstallRequirement.from_line(
+        return install_req_from_line(
             str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))),
             constraint=constraint)
diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch
index 388d8c28ba..6e5c2a3197 100644
--- a/tasks/vendoring/patches/patched/piptools.patch
+++ b/tasks/vendoring/patches/patched/piptools.patch
@@ -77,6 +77,7 @@ index 28da51f..de9b435 100644
 -    install_req_from_editable = do_import('req.constructors', 'install_req_from_editable')
 +    install_req_from_line = do_import('req.constructors', 'install_req_from_line', vendored_name="notpip")
 +    install_req_from_editable = do_import('req.constructors', 'install_req_from_editable', vendored_name="notpip")
++
 diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py
 index 08dabe1..480ad1e 100644
 --- a/pipenv/patched/piptools/repositories/local.py
@@ -112,7 +113,7 @@ index bf69803..163d510 100644
 +    InstallRequirement,
 +    SafeFileCache
  )
-+os.environ["PIP_SHIMS_BASE_MODULE"] = "pip"
++os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip"
 +from pip_shims.shims import do_import, VcsSupport, WheelCache
 +from packaging.requirements import Requirement
 +from packaging.specifiers import SpecifierSet, Specifier
@@ -239,7 +240,7 @@ index bf69803..163d510 100644
 +        if self.DEFAULT_INDEX_URL not in self.finder.index_urls:
 +            return
 +
-+        url = 'https:/pypi.org/pypi/{0}/json'.format(ireq.req.name)
++        url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name)
 +        releases = self.session.get(url).json()['releases']
 +
 +        matches = [
@@ -250,7 +251,7 @@ index bf69803..163d510 100644
 +            return
 +
 +        release_requires = self.session.get(
-+            'https:/pypi.org/pypi/{0}/{1}/json'.format(
++            'https://pypi.org/pypi/{0}/{1}/json'.format(
 +                ireq.req.name, matches[0],
 +            ),
 +        ).json()

From 6136edc0c5f7e1ac9a989255e3a8cc93455b5d71 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 7 Oct 2018 18:50:54 -0400
Subject: [PATCH 35/35] Cast environment variable as string

Signed-off-by: Dan Ryan
---
 pipenv/patched/piptools/repositories/pypi.py   | 2 +-
 tasks/vendoring/patches/patched/piptools.patch | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py
index 3061ad34ad..a0678799d1 100644
--- a/pipenv/patched/piptools/repositories/pypi.py
+++ b/pipenv/patched/piptools/repositories/pypi.py
@@ -19,7 +19,7 @@
     InstallRequirement,
     SafeFileCache
 )
-os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip"
+os.environ["PIP_SHIMS_BASE_MODULE"] = str("notpip")
 from pip_shims.shims import do_import, VcsSupport, WheelCache
 from packaging.requirements import Requirement
 from packaging.specifiers import SpecifierSet, Specifier
diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch
index 6e5c2a3197..4a653022b7 100644
--- a/tasks/vendoring/patches/patched/piptools.patch
+++ b/tasks/vendoring/patches/patched/piptools.patch
@@ -113,7 +113,7 @@ index bf69803..163d510 100644
 +    InstallRequirement,
 +    SafeFileCache
  )
-+os.environ["PIP_SHIMS_BASE_MODULE"] = "notpip"
++os.environ["PIP_SHIMS_BASE_MODULE"] = str("notpip")
 +from pip_shims.shims import do_import, VcsSupport, WheelCache
 +from packaging.requirements import Requirement
 +from packaging.specifiers import SpecifierSet, Specifier