From c9041b8edbfed1abfbdf927476366994f3ef1095 Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Wed, 19 Dec 2018 10:26:20 +0800 Subject: [PATCH 01/81] Decouple project from the existence of Pipfile. --- news/3386.trivial.rst | 1 + pipenv/cli/command.py | 6 +++++- pipenv/core.py | 27 ++++++++++++++++----------- pipenv/project.py | 23 +++++++---------------- 4 files changed, 29 insertions(+), 28 deletions(-) create mode 100644 news/3386.trivial.rst diff --git a/news/3386.trivial.rst b/news/3386.trivial.rst new file mode 100644 index 0000000000..632b815303 --- /dev/null +++ b/news/3386.trivial.rst @@ -0,0 +1 @@ +Decouple project from the existence of Pipfile. diff --git a/pipenv/cli/command.py b/pipenv/cli/command.py index ec1bef6138..84dd0c21ca 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -154,7 +154,11 @@ def cli( # There is no virtualenv yet. if not project.virtualenv_exists: echo( - crayons.red("No virtualenv has been created for this project yet!"), + "{}({}){}".format( + crayons.red("No virtualenv has been created for this project"), + crayons.white(project.project_directory, bold=True), + crayons.red(" yet!") + ), err=True, ) ctx.abort() diff --git a/pipenv/core.py b/pipenv/core.py index 4fb7939e0f..c6d76f1c09 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -540,11 +540,16 @@ def ensure_project( # Automatically use an activated virtualenv. if PIPENV_USE_SYSTEM: system = True - if not project.pipfile_exists: - if deploy is True: - raise exceptions.PipfileNotFound - else: - project.touch_pipfile() + if not project.pipfile_exists and deploy: + raise exceptions.PipfileNotFound + # Fail if working under / + if not project.name: + click.echo( + "{0}: Pipenv is not intended to work under the root directory, " + "please choose another path.".format(crayons.red("ERROR")), + err=True + ) + sys.exit(1) # Skip virtualenv creation when --system was used. if not system: ensure_virtualenv( @@ -607,24 +612,24 @@ def shorten_path(location, bold=False): def do_where(virtualenv=False, bare=True): """Executes the where functionality.""" if not virtualenv: - location = project.pipfile_location - # Shorten the virtual display of the path to the virtualenv. - if not bare: - location = shorten_path(location) - if not location: + if not project.pipfile_exists: click.echo( "No Pipfile present at project home. Consider running " "{0} first to automatically generate a Pipfile for you." "".format(crayons.green("`pipenv install`")), err=True, ) + return + location = project.pipfile_location + # Shorten the virtual display of the path to the virtualenv. + if not bare: + location = shorten_path(location) elif not bare: click.echo( "Pipfile found at {0}.\n Considering this to be the project home." 
"".format(crayons.green(location)), err=True, ) - pass else: click.echo(project.project_directory) else: diff --git a/pipenv/project.py b/pipenv/project.py index e8550a041a..aeb7a40f9f 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -222,7 +222,7 @@ def name(self): @property def pipfile_exists(self): - return bool(self.pipfile_location) + return os.path.isfile(self.pipfile_location) @property def required_python_version(self): @@ -237,11 +237,7 @@ def required_python_version(self): @property def project_directory(self): - if self.pipfile_location is not None: - return os.path.abspath(os.path.join(self.pipfile_location, os.pardir)) - - else: - return None + return os.path.abspath(os.path.join(self.pipfile_location, os.pardir)) @property def requirements_exists(self): @@ -255,8 +251,7 @@ def is_venv_in_project(self): @property def virtualenv_exists(self): - # TODO: Decouple project from existence of Pipfile. - if self.pipfile_exists and os.path.exists(self.virtualenv_location): + if os.path.exists(self.virtualenv_location): if os.name == "nt": extra = ["Scripts", "activate.bat"] else: @@ -470,7 +465,7 @@ def pipfile_location(self): try: loc = pipfile.Pipfile.find(max_depth=PIPENV_MAX_DEPTH) except RuntimeError: - loc = None + loc = "Pipfile" self._pipfile_location = _normalized(loc) return self._pipfile_location @@ -499,6 +494,8 @@ def parsed_pipfile(self): def read_pipfile(self): # Open the pipfile, read it into memory. + if not self.pipfile_exists: + return "" with io.open(self.pipfile_location) as f: contents = f.read() self._pipfile_newlines = preferred_newlines(f) @@ -659,11 +656,6 @@ def dev_packages(self): """Returns a list of dev-packages, for pip-tools to consume.""" return self._build_package_list("dev-packages") - def touch_pipfile(self): - """Simply touches the Pipfile, for later use.""" - with open("Pipfile", "a"): - os.utime("Pipfile", None) - @property def pipfile_is_empty(self): if not self.pipfile_exists: @@ -680,7 +672,6 @@ def create_pipfile(self, python=None): ConfigOptionParser, make_option_group, index_group ) - name = self.name if self.name is not None else "Pipfile" config_parser = ConfigOptionParser(name=self.name) config_parser.add_option_group(make_option_group(index_group, config_parser)) install = config_parser.option_groups[0] @@ -834,7 +825,7 @@ def write_lockfile(self, content): @property def pipfile_sources(self): - if "source" not in self.parsed_pipfile: + if self.pipfile_is_empty or "source" not in self.parsed_pipfile: return [DEFAULT_SOURCE] # We need to make copies of the source info so we don't # accidentally modify the cache. 
See #2100 where values are From c17f36f9b2c912edecd1b1b4387d97f1d3917441 Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Wed, 19 Dec 2018 11:09:05 +0800 Subject: [PATCH 02/81] make path absolute --- pipenv/project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/project.py b/pipenv/project.py index aeb7a40f9f..64ef6a2956 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -53,7 +53,7 @@ def _normalized(p): path_str = matches and matches[0] or str(loc) else: path_str = str(loc) - return normalize_drive(path_str) + return normalize_drive(os.path.abspath(path_str)) DEFAULT_NEWLINES = u"\n" From 0efa7e9fca195352f1e011c646dcf0df3446fe0c Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Wed, 19 Dec 2018 15:36:04 +0800 Subject: [PATCH 03/81] remove redundant branch --- pipenv/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pipenv/core.py b/pipenv/core.py index c6d76f1c09..bd8c5fc188 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -624,7 +624,6 @@ def do_where(virtualenv=False, bare=True): # Shorten the virtual display of the path to the virtualenv. if not bare: location = shorten_path(location) - elif not bare: click.echo( "Pipfile found at {0}.\n Considering this to be the project home." "".format(crayons.green(location)), From 8124d10cfaceafd21774ca9257e031330b135c55 Mon Sep 17 00:00:00 2001 From: Jerome Leclanche Date: Sun, 23 Dec 2018 08:37:45 +0200 Subject: [PATCH 04/81] Prevent a crash in version check error message Fixes #3407 --- pipenv/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/core.py b/pipenv/core.py index 4fb7939e0f..24e94443be 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -566,7 +566,7 @@ def ensure_project( crayons.red("Warning", bold=True), crayons.normal("python_version", bold=True), crayons.blue(project.required_python_version), - crayons.blue(python_version(path_to_python)), + crayons.blue(python_version(path_to_python) or "unknown"), crayons.green(shorten_path(path_to_python)), ), err=True, From 786cc6ef2f546bd5c3af68f8ed454e91c8093fba Mon Sep 17 00:00:00 2001 From: Artemiy Date: Thu, 24 Jan 2019 21:29:35 +0300 Subject: [PATCH 05/81] add test for counting pre option and fix it's duplicate --- pipenv/cli/options.py | 1 - tests/unit/test_help.py | 13 +++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/pipenv/cli/options.py b/pipenv/cli/options.py index 745275ddd1..d6be0bac08 100644 --- a/pipenv/cli/options.py +++ b/pipenv/cli/options.py @@ -375,7 +375,6 @@ def install_options(f): f = index_option(f) f = extra_index_option(f) f = requirementstxt_option(f) - f = pre_option(f) f = selective_upgrade_option(f) f = ignore_pipfile_option(f) f = editable_option(f) diff --git a/tests/unit/test_help.py b/tests/unit/test_help.py index 2432e96996..d37556018a 100644 --- a/tests/unit/test_help.py +++ b/tests/unit/test_help.py @@ -9,3 +9,16 @@ def test_help(): stderr=subprocess.STDOUT, env=os.environ.copy(), ) assert output + + +def test_count_of_description_pre_option(): + test_command = 'pipenv install --help' + test_line = '--pre Allow pre-releases.' 
+ out = subprocess.Popen(test_command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout, _ = out.communicate() + lines = stdout.decode().split('\n') + count = 0 + for line in lines: + if line.strip().split() == test_line.split(): + count += 1 + assert count == 1 From 04a10c87dc70d571bd042b69d1d0d4a12259a65d Mon Sep 17 00:00:00 2001 From: David Beitey Date: Tue, 12 Feb 2019 12:50:15 +1000 Subject: [PATCH 06/81] Ensure docs show nav on small-screen devices Fixes #3527 --- docs/_templates/hacks.html | 1 - news/3527.doc.rst | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 news/3527.doc.rst diff --git a/docs/_templates/hacks.html b/docs/_templates/hacks.html index 0ec542fa19..9736d409c4 100644 --- a/docs/_templates/hacks.html +++ b/docs/_templates/hacks.html @@ -18,7 +18,6 @@ /* Remain Responsive! */ @media screen and (max-width: 1008px) { - div.sphinxsidebar {display: none;} div.document {width: 100%!important;} /* Have code blocks escape the document right-margin. */ diff --git a/news/3527.doc.rst b/news/3527.doc.rst new file mode 100644 index 0000000000..b6043a08d9 --- /dev/null +++ b/news/3527.doc.rst @@ -0,0 +1 @@ +Ensure docs show navigation on small-screen devices From 09e1d55beea4d840e62a4f633a72e17b5dfb2193 Mon Sep 17 00:00:00 2001 From: frostming Date: Mon, 1 Apr 2019 12:58:29 +0800 Subject: [PATCH 07/81] Refine the news entry --- news/3386.behavior.rst | 1 + news/3386.trivial.rst | 1 - news/3434.trivial.rst | 1 + 3 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 news/3386.behavior.rst delete mode 100644 news/3386.trivial.rst create mode 100644 news/3434.trivial.rst diff --git a/news/3386.behavior.rst b/news/3386.behavior.rst new file mode 100644 index 0000000000..8ddc27c6b2 --- /dev/null +++ b/news/3386.behavior.rst @@ -0,0 +1 @@ +Do not touch Pipfile early and rely on it so that one can do ``pipenv sync`` without a Pipfile. diff --git a/news/3386.trivial.rst b/news/3386.trivial.rst deleted file mode 100644 index 632b815303..0000000000 --- a/news/3386.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Decouple project from the existence of Pipfile. diff --git a/news/3434.trivial.rst b/news/3434.trivial.rst new file mode 100644 index 0000000000..622b52db4a --- /dev/null +++ b/news/3434.trivial.rst @@ -0,0 +1 @@ +Improve the error message when one tries to initialize a Pipenv project under ``/``. From 3fe5061682ed805db18a4104cd186b5e9eea9a8a Mon Sep 17 00:00:00 2001 From: Adam Goldschmidt Date: Sun, 20 Jan 2019 13:09:28 +0200 Subject: [PATCH 08/81] conservative check for known exceptions in subprocess stderr. 
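The approach, in short: scan the captured stderr for the names of known exception classes and, when one matches, surface only the short message after the matching marker instead of the whole traceback. A minimal standalone sketch of that matching logic follows; the "PermissionError" mapping is taken from the diff below, while the helper name and the sample traceback are illustrative only:

    # Sketch of the known-exception check; helper name and sample are illustrative.
    KNOWN_EXCEPTIONS = {
        "PermissionError": "Permission denied:",
    }

    def shorten_known_error(stderr):
        """Return only the final error line when stderr contains a known exception."""
        for exc_name, marker in KNOWN_EXCEPTIONS.items():
            if exc_name in stderr:
                # Keep everything from the last occurrence of the marker onward,
                # dropping the traceback that precedes it.
                _, sep, tail = stderr.rpartition(marker)
                return "{0}{1}".format(sep, tail).strip()
        return stderr

    sample = (
        "Traceback (most recent call last):\n"
        "  ...\n"
        "PermissionError: [Errno 13] Permission denied: '/usr/lib/python3.7'\n"
    )
    print(shorten_known_error(sample))
    # -> Permission denied: '/usr/lib/python3.7'
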
--- pipenv/core.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 768e7ee73a..9dd2784ec3 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -925,9 +925,17 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): ) click.echo(crayons.blue("{0}".format(c.out)), err=True) if c.returncode != 0: - sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format(u"Failed creating virtual environment")) + sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Failed creating virtual environment")) + known_exceptions = { + "PermissionError": "Permission denied:", + } + # PermissionError - hide the traceback for better UX + for partition in (part + for e, part in known_exceptions.items() if e in c.err): + known_exceptions_partition = c.err.rpartition(partition) + c.err = "{} {}".format(known_exceptions_partition[1], known_exceptions_partition[2]) raise exceptions.VirtualenvCreationException( - extra=[crayons.blue("{0}".format(c.err)),] + extra=[crayons.red("{0}".format(c.err)),] ) else: From f52abe32bce46de4991a6376a0a1ec0a8bf0d73f Mon Sep 17 00:00:00 2001 From: Adam Goldschmidt Date: Fri, 25 Jan 2019 11:35:22 +0200 Subject: [PATCH 09/81] add behavior rst --- news/2553.behavior.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/2553.behavior.rst diff --git a/news/2553.behavior.rst b/news/2553.behavior.rst new file mode 100644 index 0000000000..d66edfa2fa --- /dev/null +++ b/news/2553.behavior.rst @@ -0,0 +1 @@ +Make conservative checks of known exceptions when subprocess returns output, so user won't see the whole traceback - just the error. \ No newline at end of file From 2d75f1af53238a89d5eddbe1f83fb2ed244e6d4f Mon Sep 17 00:00:00 2001 From: Adam Goldschmidt Date: Mon, 11 Mar 2019 09:35:17 +0200 Subject: [PATCH 10/81] =?UTF-8?q?feat:=20=F0=9F=8E=B8=20Add=20prettify=5Fe?= =?UTF-8?q?xc=20method=20to=20handle=20known=20errors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit catch known errors in stderr and display them correctly --- pipenv/core.py | 11 ++--------- pipenv/exceptions.py | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 9dd2784ec3..76ce7e8b47 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -926,16 +926,9 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): click.echo(crayons.blue("{0}".format(c.out)), err=True) if c.returncode != 0: sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Failed creating virtual environment")) - known_exceptions = { - "PermissionError": "Permission denied:", - } - # PermissionError - hide the traceback for better UX - for partition in (part - for e, part in known_exceptions.items() if e in c.err): - known_exceptions_partition = c.err.rpartition(partition) - c.err = "{} {}".format(known_exceptions_partition[1], known_exceptions_partition[2]) + error = c.err if environments.is_verbose() else exceptions.prettify_exc(c.err) raise exceptions.VirtualenvCreationException( - extra=[crayons.red("{0}".format(c.err)),] + extra=[crayons.red("{0}".format(error)),] ) else: diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 000c57153d..495a46bfbb 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -18,7 +18,11 @@ ) from .vendor.click.types import Path from .vendor.click.utils import echo as click_echo +import vistir +KNOWN_EXCEPTIONS = { + "PermissionError": "Permission denied:", +} def 
handle_exception(exc_type, exception, traceback, hook=sys.excepthook): if environments.is_verbose() or not issubclass(exc_type, ClickException): @@ -402,3 +406,18 @@ def __init__(self, req=None): ) extra = [crayons.normal(decode_for_output(str(req)))] super(RequirementError, self).__init__(message, extra=extra) + super(ResolutionFailure, self).__init__(fix_utf8(message), extra=extra) + + +def prettify_exc(error): + """Catch known errors and prettify them instead of showing the + entire traceback, for better UX""" + matched_exceptions = [k for k in KNOWN_EXCEPTIONS.keys() if k in error] + if not matched_exceptions: + return "{}".format(vistir.misc.decode_for_output(error)) + errors = [] + for match in matched_exceptions: + _, error, info = error.rpartition(KNOWN_EXCEPTIONS[match]) + errors.append("{} {}".format(error, info)) + + return "\n".join(errors) From 8e584055cf1b7dcfa7497fc1a5c1a937199428b7 Mon Sep 17 00:00:00 2001 From: John Vandenberg Date: Wed, 3 Apr 2019 10:11:44 +0700 Subject: [PATCH 11/81] conftest.py: Ignore KeyboardInterrupt During check_internet() and check_github_ssh(), a KeyboardInterrupt should be interpreted as user desire to escape the check, not escape the entire test run. This is especially true during check_github_ssh which may require a passphrase from the user, which they might feel uncomfortable giving during a test suite. After these checks are bypassed, there is user feedback indicating the tests are running, and so they can trigger KeyboardInterrupt again if they wish to escape the entire test run. --- tests/integration/conftest.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 766291e835..d6d0aded27 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -38,6 +38,10 @@ def check_internet(): for url in ("http://httpbin.org/ip", "http://clients3.google.com/generate_204"): try: try_internet(url) + except KeyboardInterrupt: + warnings.warn( + "Skipped connecting to internet: {0}".format(url), RuntimeWarning + ) except Exception: warnings.warn( "Failed connecting to internet: {0}".format(url), RuntimeWarning @@ -58,6 +62,10 @@ def check_github_ssh(): # return_code=255 and say 'Permission denied (publickey).' 
c = delegator.run('ssh -T git@github.com') res = True if c.return_code == 1 else False + except KeyboardInterrupt: + warnings.warn( + "KeyboardInterrupt while checking GitHub ssh access", RuntimeWarning + ) except Exception: pass global HAS_WARNED_GITHUB From 9feb0e08ec3233d6f99c807f0ae0418533b0ca27 Mon Sep 17 00:00:00 2001 From: John Vandenberg Date: Wed, 3 Apr 2019 12:08:30 +0700 Subject: [PATCH 12/81] Add news entry --- news/3669.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/3669.trivial.rst diff --git a/news/3669.trivial.rst b/news/3669.trivial.rst new file mode 100644 index 0000000000..86ff928050 --- /dev/null +++ b/news/3669.trivial.rst @@ -0,0 +1 @@ +Allow KeyboardInterrupt to cancel test suite checks for working internet and ssh From f0b527798c903346d9ea0eafcbfbd603b749ae1d Mon Sep 17 00:00:00 2001 From: Jonathon Belotti Date: Tue, 9 Apr 2019 22:58:09 +1000 Subject: [PATCH 13/81] remove always-True if-statement, and make function docstring accurate --- pipenv/utils.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pipenv/utils.py b/pipenv/utils.py index 22c8653b3f..38e8e2e9b8 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -1374,15 +1374,14 @@ def walk_up(bottom): def find_requirements(max_depth=3): - """Returns the path of a Pipfile in parent directories.""" + """Returns the path of a requirements.txt file in parent directories.""" i = 0 for c, d, f in walk_up(os.getcwd()): i += 1 if i < max_depth: - if "requirements.txt": - r = os.path.join(c, "requirements.txt") - if os.path.isfile(r): - return r + r = os.path.join(c, "requirements.txt") + if os.path.isfile(r): + return r raise RuntimeError("No requirements.txt found!") From 3493f53c5cfa25ba5d3278215b3d57bcf7a8dabf Mon Sep 17 00:00:00 2001 From: s-pace Date: Wed, 17 Apr 2019 11:44:38 +0200 Subject: [PATCH 14/81] feat: add search to the main introduction page --- docs/_templates/sidebarintro.html | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/docs/_templates/sidebarintro.html b/docs/_templates/sidebarintro.html index 23b91e7e1a..8390940eaa 100644 --- a/docs/_templates/sidebarintro.html +++ b/docs/_templates/sidebarintro.html @@ -8,12 +8,32 @@

+ [added lines: the markup for a new documentation search form; the HTML tags were lost in extraction]

 Pipenv is a production-ready tool that aims to bring the best of all packaging worlds to the Python world. It harnesses Pipfile, pip, and virtualenv into one single command.

 It features very pretty terminal colors.

- [one removed line; its markup was lost in extraction]

 Stay Informed

 Receive updates on new releases and upcoming projects.
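For reference, a typical Sphinx sidebar search form looks like the sketch below. This is illustrative only; the markup actually added by this patch and the next one was lost in extraction and may differ:

    <!-- Illustrative sketch, not the patch's actual markup -->
    <div id="searchbox" role="search">
      <h3>Quick search</h3>
      <form class="search" action="{{ pathto('search') }}" method="get">
        <input type="text" name="q" />
        <input type="submit" value="Go" />
      </form>
    </div>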

From cd13852fca562cc408851c264a968157760ae78d Mon Sep 17 00:00:00 2001
From: s-pace
Date: Wed, 17 Apr 2019 11:45:20 +0200
Subject: [PATCH 15/81] feat: add search to every documentation page

---
 docs/_templates/sidebarlogo.html | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/docs/_templates/sidebarlogo.html b/docs/_templates/sidebarlogo.html
index 5107e4de34..00fb20c4b0 100644
--- a/docs/_templates/sidebarlogo.html
+++ b/docs/_templates/sidebarlogo.html
@@ -8,6 +8,27 @@

+ [added lines: the markup for a new documentation search form; the HTML tags were lost in extraction]

Pipenv is a production-ready tool that aims to bring the best of all packaging worlds to the Python world. It harnesses Pipfile, pip, and virtualenv into one single command. From ff4c8b0d5c0ea039b832f86a85f36e8558042902 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 18 Apr 2019 13:50:48 -0400 Subject: [PATCH 16/81] Update vendored deps Signed-off-by: Dan Ryan --- pipenv/__init__.py | 22 +- pipenv/environment.py | 10 +- pipenv/resolver.py | 221 ++++- pipenv/utils.py | 386 ++++++-- pipenv/vendor/pythonfinder/environment.py | 5 + pipenv/vendor/pythonfinder/models/mixins.py | 9 +- pipenv/vendor/pythonfinder/models/path.py | 259 +++-- pipenv/vendor/pythonfinder/models/python.py | 8 +- pipenv/vendor/pythonfinder/pythonfinder.py | 13 +- pipenv/vendor/pythonfinder/utils.py | 32 +- pipenv/vendor/requirementslib/__init__.py | 2 +- .../vendor/requirementslib/models/markers.py | 514 +++++++++- .../vendor/requirementslib/models/pipfile.py | 68 +- .../requirementslib/models/requirements.py | 395 +++----- .../requirementslib/models/setup_info.py | 914 +++++++++++++----- pipenv/vendor/requirementslib/models/url.py | 6 + pipenv/vendor/requirementslib/models/utils.py | 118 ++- pipenv/vendor/requirementslib/utils.py | 34 +- pipenv/vendor/vistir/__init__.py | 4 +- pipenv/vendor/vistir/_winconsole.py | 393 ++++++++ pipenv/vendor/vistir/compat.py | 43 +- pipenv/vendor/vistir/contextmanagers.py | 2 + pipenv/vendor/vistir/cursor.py | 30 +- pipenv/vendor/vistir/misc.py | 264 ++++- pipenv/vendor/vistir/path.py | 23 +- pipenv/vendor/vistir/termcolors.py | 71 +- tests/integration/conftest.py | 4 +- 27 files changed, 2866 insertions(+), 984 deletions(-) create mode 100644 pipenv/vendor/vistir/_winconsole.py diff --git a/pipenv/__init__.py b/pipenv/__init__.py index 016f1012f1..7128163aa8 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -36,23 +36,11 @@ except Exception: pass -from .vendor.vistir.misc import get_wrapped_stream -if sys.version_info >= (3, 0): - stdout = sys.stdout.buffer - stderr = sys.stderr.buffer -else: - stdout = sys.stdout - stderr = sys.stderr - - -sys.stderr = get_wrapped_stream(stderr) -sys.stdout = get_wrapped_stream(stdout) -from .vendor.colorama import AnsiToWin32 -if os.name == "nt": - stderr_wrapper = AnsiToWin32(sys.stderr, autoreset=False, convert=None, strip=None) - stdout_wrapper = AnsiToWin32(sys.stdout, autoreset=False, convert=None, strip=None) - sys.stderr = stderr_wrapper.stream - sys.stdout = stdout_wrapper.stream +from .vendor.vistir.misc import replace_with_text_stream +from .vendor import colorama +replace_with_text_stream("stdout") +replace_with_text_stream("stderr") +colorama.init(wrap=False) from .cli import cli from . 
import resolver diff --git a/pipenv/environment.py b/pipenv/environment.py index 7ada63993b..4744c32db2 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -442,7 +442,7 @@ def reverse_dependency(cls, node): yield new_node def reverse_dependencies(self): - from vistir.misc import unnest + from vistir.misc import unnest, chunked rdeps = {} for req in self.get_package_requirements(): for d in self.reverse_dependency(req): @@ -454,18 +454,20 @@ def reverse_dependencies(self): "required": d["required_version"] } } - parents = set(d.get("parent", [])) + parents = tuple(d.get("parent", ())) pkg[name]["parents"] = parents if rdeps.get(name): if not (rdeps[name].get("required") or rdeps[name].get("installed")): rdeps[name].update(pkg[name]) - rdeps[name]["parents"] = rdeps[name].get("parents", set()) | parents + rdeps[name]["parents"] = rdeps[name].get("parents", ()) + parents else: rdeps[name] = pkg[name] for k in list(rdeps.keys()): entry = rdeps[k] if entry.get("parents"): - rdeps[k]["parents"] = set([p for p in unnest(entry["parents"])]) + rdeps[k]["parents"] = set([ + p for p, version in chunked(2, unnest(entry["parents"])) + ]) return rdeps def get_working_set(self): diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 640317066e..4f4df8a55a 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -133,12 +133,104 @@ def clean_initial_dict(cls, entry_dict): del entry_dict["name"] return entry_dict - def get_cleaned_dict(self): - if self.is_updated: + @classmethod + def parse_pyparsing_exprs(cls, expr_iterable): + from pipenv.vendor.pyparsing import Literal, MatchFirst + keys = [] + expr_list = [] + expr = expr_iterable.copy() + if isinstance(expr, Literal) or ( + expr.__class__.__name__ == Literal.__name__ + ): + keys.append(expr.match) + elif isinstance(expr, MatchFirst) or ( + expr.__class__.__name__ == MatchFirst.__name__ + ): + expr_list = expr.exprs + elif isinstance(expr, list): + expr_list = expr + if expr_list: + for part in expr_list: + keys.extend(cls.parse_pyparsing_exprs(part)) + return keys + + @classmethod + def get_markers_from_dict(cls, entry_dict): + from pipenv.vendor.packaging import markers as packaging_markers + from pipenv.vendor.requirementslib.models.markers import normalize_marker_str + marker_keys = cls.parse_pyparsing_exprs(packaging_markers.VARIABLE) + markers = set() + keys_in_dict = [k for k in marker_keys if k in entry_dict] + markers = { + normalize_marker_str("{k} {v}".format(k=k, v=entry_dict.pop(k))) + for k in keys_in_dict + } + if "markers" in entry_dict: + markers.add(normalize_marker_str(entry_dict["markers"])) + if None in markers: + markers.remove(None) + if markers: + entry_dict["markers"] = " and ".join(list(markers)) + else: + markers = None + return markers, entry_dict + + @property + def markers(self): + self._markers, self.entry_dict = self.get_markers_from_dict(self.entry_dict) + return self._markers + + @markers.setter + def markers(self, markers): + if not markers: + marker_str = self.marker_to_str(markers) + if marker_str: + self._entry = self.entry.merge_markers(marker_str) + self._markers = self.marker_to_str(self._entry.markers) + entry_dict = self.entry_dict.copy() + entry_dict["markers"] = self.marker_to_str(self._entry.markers) + self.entry_dict = entry_dict + + @property + def original_markers(self): + original_markers, lockfile_dict = self.get_markers_from_dict( + self.lockfile_dict + ) + self.lockfile_dict = lockfile_dict + self._original_markers = self.marker_to_str(original_markers) + return 
self._original_markers + + @staticmethod + def marker_to_str(marker): + from pipenv.vendor.requirementslib.models.markers import normalize_marker_str + if not marker: + return None + from pipenv.vendor import six + from pipenv.vendor.vistir.compat import Mapping + marker_str = None + if isinstance(marker, Mapping): + marker_dict, _ = Entry.get_markers_from_dict(marker) + if marker_dict: + marker_str = "{0}".format(marker_dict.popitem()[1]) + elif isinstance(marker, (list, set, tuple)): + marker_str = " and ".join([normalize_marker_str(m) for m in marker if m]) + elif isinstance(marker, six.string_types): + marker_str = "{0}".format(normalize_marker_str(marker)) + if isinstance(marker_str, six.string_types): + return marker_str + return None + + def get_cleaned_dict(self, keep_outdated=False): + if keep_outdated and self.is_updated: self.validate_constraints() self.ensure_least_updates_possible() + elif not keep_outdated: + self.validate_constraints() if self.entry.extras != self.lockfile_entry.extras: - self._entry.req.extras.extend(self.lockfile_entry.req.extras) + entry_extras = list(self.entry.extras) + if self.lockfile_entry.extras: + entry_extras.extend(list(self.lockfile_entry.extras)) + self._entry.req.extras = entry_extras self.entry_dict["extras"] = self.entry.extras entry_hashes = set(self.entry.hashes) locked_hashes = set(self.lockfile_entry.hashes) @@ -202,10 +294,10 @@ def create(cls, name, entry_dict, project, resolver, reverse_deps=None, dev=Fals def clean_specifier(specifier): from pipenv.vendor.packaging.specifiers import Specifier if not any(specifier.startswith(k) for k in Specifier._operators.keys()): - if specifier.strip().lower() in ["any", "*"]: + if specifier.strip().lower() in ["any", "", "*"]: return "*" specifier = "=={0}".format(specifier) - elif specifier.startswith("==") and specifier.count("=") > 2: + elif specifier.startswith("==") and specifier.count("=") > 3: specifier = "=={0}".format(specifier.lstrip("=")) return specifier @@ -255,7 +347,7 @@ def requirements(self): if not self._requires: self._requires = next(iter( self.project.environment.get_package_requirements(self.name) - ), None) + ), {}) return self._requires @property @@ -284,21 +376,25 @@ def validate_specifiers(self): return True def get_dependency(self, name): - return next(iter( - dep for dep in self.requirements.get("dependencies", []) - if dep.get("package_name", "") == name - ), {}) + if self.requirements: + return next(iter( + dep for dep in self.requirements.get("dependencies", []) + if dep and dep.get("package_name", "") == name + ), {}) + return {} def get_parent_deps(self, unnest=False): from pipenv.vendor.packaging.specifiers import Specifier parents = [] for spec in self.reverse_deps.get(self.normalized_name, {}).get("parents", set()): - spec_index = next(iter(c for c in Specifier._operators if c in spec), None) + spec_match = next(iter(c for c in Specifier._operators if c in spec), None) name = spec parent = None - if spec_index is not None: - specifier = self.clean_specifier(spec[spec_index:]) - name = spec[:spec_index] + if spec_match is not None: + spec_index = spec.index(spec_match) + specifier = self.clean_specifier(spec[spec_index:len(spec_match)]).strip() + name_start = spec_index + len(spec_match) + name = spec[name_start:].strip() parent = self.create_parent(name, specifier) else: name = spec @@ -373,7 +469,7 @@ def get_constraints(self): if c and c.name == self.entry.name } pipfile_constraint = self.get_pipfile_constraint() - if pipfile_constraint: + if 
pipfile_constraint and not (self.pipfile_entry.editable or pipfile_constraint.editable): constraints.add(pipfile_constraint) return constraints @@ -415,10 +511,16 @@ def constraint_from_parent_conflicts(self): required = self.clean_specifier(required) parent_requires = self.make_requirement(self.name, required) parent_dependencies.add("{0} => {1} ({2})".format(p.name, self.name, required)) - if not parent_requires.requirement.specifier.contains(self.original_version): + # use pre=True here or else prereleases dont satisfy constraints + if parent_requires.requirement.specifier and ( + not parent_requires.requirement.specifier.contains(self.original_version, prereleases=True) + ): can_use_original = False - if not parent_requires.requirement.specifier.contains(self.updated_version): - has_mismatch = True + if parent_requires.requirement.specifier and ( + not parent_requires.requirement.specifier.contains(self.updated_version, prereleases=True) + ): + if not self.entry.editable and self.updated_version != self.original_version: + has_mismatch = True if has_mismatch and not can_use_original: from pipenv.exceptions import DependencyConflict msg = ( @@ -500,6 +602,23 @@ def __getattribute__(self, key): return super(Entry, self).__getattribute__(key) +def clean_results(results, resolver, project, dev=False): + if not project.lockfile_exists: + return results + lockfile = project.lockfile_content + section = "develop" if dev else "default" + pipfile_section = "dev-packages" if dev else "packages" + reverse_deps = project.environment.reverse_dependencies() + new_results = [r for r in results if r["name"] not in lockfile[section]] + for result in results: + name = result.get("name") + entry_dict = result.copy() + entry = Entry(name, entry_dict, project, resolver, reverse_deps=reverse_deps, dev=dev) + entry_dict = entry.get_cleaned_dict(keep_outdated=False) + new_results.append(entry_dict) + return new_results + + def clean_outdated(results, resolver, project, dev=False): from pipenv.vendor.requirementslib.models.requirements import Requirement if not project.lockfile_exists: @@ -520,24 +639,29 @@ def clean_outdated(results, resolver, project, dev=False): # TODO: Should this be the case for all locking? 
if entry.was_editable and not entry.is_editable: continue - # if the entry has not changed versions since the previous lock, - # don't introduce new markers since that is more restrictive - if entry.has_markers and not entry.had_markers and not entry.is_updated: - del entry.entry_dict["markers"] - entry._entry.req.req.marker = None - entry._entry.markers = "" - # do make sure we retain the original markers for entries that are not changed - elif entry.had_markers and not entry.has_markers and not entry.is_updated: - if entry._entry and entry._entry.req and entry._entry.req.req and ( - entry.lockfile_entry and entry.lockfile_entry.req and - entry.lockfile_entry.req.req and entry.lockfile_entry.req.req.marker - ): - entry._entry.req.req.marker = entry.lockfile_entry.req.req.marker - if entry.lockfile_entry and entry.lockfile_entry.markers: - entry._entry.markers = entry.lockfile_entry.markers - if entry.lockfile_dict and "markers" in entry.lockfile_dict: - entry.entry_dict["markers"] = entry.lockfile_dict["markers"] - entry_dict = entry.get_cleaned_dict() + lockfile_entry = lockfile[section].get(name, None) + if not lockfile_entry: + alternate_section = "develop" if not dev else "default" + if name in lockfile[alternate_section]: + lockfile_entry = lockfile[alternate_section][name] + if lockfile_entry and not entry.is_updated: + old_markers = next(iter(m for m in ( + entry.lockfile_entry.markers, lockfile_entry.get("markers", None) + ) if m is not None), None) + new_markers = entry_dict.get("markers", None) + if old_markers: + old_markers = Entry.marker_to_str(old_markers) + if old_markers and not new_markers: + entry.markers = old_markers + elif new_markers and not old_markers: + del entry.entry_dict["markers"] + entry._entry.req.req.marker = None + entry._entry.markers = None + # if the entry has not changed versions since the previous lock, + # don't introduce new markers since that is more restrictive + # if entry.has_markers and not entry.had_markers and not entry.is_updated: + # do make sure we retain the original markers for entries that are not changed + entry_dict = entry.get_cleaned_dict(keep_outdated=True) new_results.append(entry_dict) return new_results @@ -582,6 +706,8 @@ def resolve_packages(pre, clear, verbose, system, write, requirements_dir, packa ) def resolve(packages, pre, project, sources, clear, system, requirements_dir=None): + from pipenv.patched.piptools import logging as piptools_logging + piptools_logging.log.verbosity = 1 if verbose else 0 return resolve_deps( packages, which, @@ -611,6 +737,8 @@ def resolve(packages, pre, project, sources, clear, system, requirements_dir=Non ) if keep_outdated: results = clean_outdated(results, resolver, project) + else: + results = clean_results(results, resolver, project) if write: with open(write, "w") as fh: if not results: @@ -646,26 +774,19 @@ def main(): _patch_path(pipenv_site=parsed.pipenv_site) import warnings from pipenv.vendor.vistir.compat import ResourceWarning - from pipenv.vendor.vistir.misc import get_wrapped_stream + from pipenv.vendor.vistir.misc import replace_with_text_stream warnings.simplefilter("ignore", category=ResourceWarning) - import six - if six.PY3: - stdout = sys.stdout.buffer - stderr = sys.stderr.buffer - else: - stdout = sys.stdout - stderr = sys.stderr - sys.stderr = get_wrapped_stream(stderr) - sys.stdout = get_wrapped_stream(stdout) + replace_with_text_stream("stdout") + replace_with_text_stream("stderr") from pipenv.vendor import colorama if os.name == "nt" and ( all(getattr(stream, method, 
None) for stream in [sys.stdout, sys.stderr] for method in ["write", "isatty"]) and all(stream.isatty() for stream in [sys.stdout, sys.stderr]) ): - stderr_wrapper = colorama.AnsiToWin32(sys.stderr, autoreset=False, convert=None, strip=None) - stdout_wrapper = colorama.AnsiToWin32(sys.stdout, autoreset=False, convert=None, strip=None) - sys.stderr = stderr_wrapper.stream - sys.stdout = stdout_wrapper.stream + # stderr_wrapper = colorama.AnsiToWin32(sys.stderr, autoreset=False, convert=None, strip=None) + # stdout_wrapper = colorama.AnsiToWin32(sys.stdout, autoreset=False, convert=None, strip=None) + # sys.stderr = stderr_wrapper.stream + # sys.stdout = stdout_wrapper.stream colorama.init(wrap=False) elif os.name != "nt": colorama.init() diff --git a/pipenv/utils.py b/pipenv/utils.py index 22c8653b3f..3d526974f3 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -38,6 +38,8 @@ if environments.MYPY_RUNNING: from typing import Tuple, Dict, Any, List, Union, Optional, Text from .vendor.requirementslib.models.requirements import Requirement, Line + from .vendor.packaging.markers import Marker + from .vendor.packaging.specifiers import Specifier from .project import Project @@ -292,11 +294,14 @@ def get_pipenv_sitedir(): class Resolver(object): - def __init__(self, constraints, req_dir, project, sources, clear=False, pre=False): + def __init__( + self, constraints, req_dir, project, sources, index_lookup=None, + markers_lookup=None, skipped=None, clear=False, pre=False + ): from pipenv.patched.piptools import logging as piptools_logging if environments.is_verbose(): logging.log.verbose = True - piptools_logging.log.verbose = True + piptools_logging.log.verbosity = environments.PIPENV_VERBOSITY self.initial_constraints = constraints self.req_dir = req_dir self.project = project @@ -306,6 +311,11 @@ def __init__(self, constraints, req_dir, project, sources, clear=False, pre=Fals self.clear = clear self.pre = pre self.results = None + self.markers_lookup = markers_lookup if markers_lookup is not None else {} + self.index_lookup = index_lookup if index_lookup is not None else {} + self.skipped = skipped if skipped is not None else {} + self.markers = {} + self.requires_python_markers = {} self._pip_args = None self._constraints = None self._parsed_constraints = None @@ -437,6 +447,7 @@ def get_deps_from_req(cls, req): except TypeError: raise RequirementError(req=req) setup_info = req.req.setup_info + setup_info.get_info() locked_deps[pep423_name(name)] = entry requirements = [v for v in getattr(setup_info, "requires", {}).values()] for r in requirements: @@ -446,20 +457,20 @@ def get_deps_from_req(cls, req): continue line = _requirement_to_str_lowercase_name(r) new_req, _, _ = cls.parse_line(line) - if r.marker and not r.marker.evaluate(): - new_constraints = {} - _, new_entry = req.pipfile_entry - new_lock = { - pep423_name(new_req.normalized_name): new_entry - } - else: - new_constraints, new_lock = cls.get_deps_from_req(new_req) - locked_deps.update(new_lock) - constraints |= new_constraints - else: - if r is not None: - line = _requirement_to_str_lowercase_name(r) - constraints.add(line) + if r.marker and not r.marker.evaluate(): + new_constraints = {} + _, new_entry = req.pipfile_entry + new_lock = { + pep423_name(new_req.normalized_name): new_entry + } + else: + new_constraints, new_lock = cls.get_deps_from_req(new_req) + locked_deps.update(new_lock) + constraints |= new_constraints + # if there is no marker or there is a valid marker, add the constraint line + elif r and (not r.marker or 
(r.marker and r.marker.evaluate())): + line = _requirement_to_str_lowercase_name(r) + constraints.add(line) # ensure the top level entry remains as provided # note that we shouldn't pin versions for editable vcs deps if (not req.is_vcs or (req.is_vcs and not req.editable)): @@ -477,10 +488,49 @@ def get_deps_from_req(cls, req): req.req.setup_path is not None and os.path.exists(req.req.setup_path)): constraints.add(req.constraint_line) else: + # if the dependency isn't installable, don't add it to constraints + # and instead add it directly to the lock + if req and req.requirement and ( + req.requirement.marker and not req.requirement.marker.evaluate() + ): + return constraints, locked_deps constraints.add(req.constraint_line) return constraints, locked_deps return constraints, locked_deps + @classmethod + def create( + cls, + deps, # type: List[str] + index_lookup=None, # type: Dict[str, str] + markers_lookup=None, # type: Dict[str, str] + project=None, # type: Project + sources=None, # type: List[str] + req_dir=None, # type: str + clear=False, # type: bool + pre=False # type: bool + ): + # type: (...) -> "Resolver" + from pipenv.vendor.vistir.path import create_tracked_tempdir + if not req_dir: + req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") + if index_lookup is None: + index_lookup = {} + if markers_lookup is None: + markers_lookup = {} + if project is None: + from pipenv.core import project + project = project + if sources is None: + sources = project.sources + constraints, skipped, index_lookup, markers_lookup = cls.get_metadata( + deps, index_lookup, markers_lookup, project, sources, + ) + return Resolver( + constraints, req_dir, project, sources, index_lookup=index_lookup, + markers_lookup=markers_lookup, skipped=skipped, clear=clear, pre=pre + ) + @property def pip_command(self): if self._pip_command is None: @@ -602,6 +652,35 @@ def resolve(self): self.resolved_tree.update(results) return self.resolved_tree + @lru_cache(maxsize=1024) + def fetch_candidate(self, ireq): + candidates = self.repository.find_all_candidates(ireq.name) + matched_version = next(iter(sorted( + ireq.specifier.filter((c.version for c in candidates), True), reverse=True) + ), None) + if matched_version: + matched_candidate = next(iter( + c for c in candidates if c.version == matched_version + )) + return matched_candidate + return None + + def resolve_constraints(self): + new_tree = set() + for result in self.resolved_tree: + if result.markers: + self.markers[result.name] = result.markers + else: + candidate = self.fetch_candidate(result) + if getattr(candidate, "requires_python", None): + marker = make_marker_from_specifier(candidate.requires_python) + self.markers[result.name] = marker + result.markers = marker + if result.req: + result.req.marker = marker + new_tree.add(result) + self.resolved_tree = new_tree + @classmethod def prepend_hash_types(cls, checksums): cleaned_checksums = [] @@ -720,6 +799,87 @@ def resolve_hashes(self): self.hashes[ireq] = self.get_hash(ireq, ireq_hashes=ireq_hashes) return self.hashes + def _clean_skipped_result(self, req, value): + ref = None + if req.is_vcs: + ref = req.commit_hash + ireq = req.as_ireq() + entry = value.copy() + entry["name"] = req.name + if entry.get("editable", False) and entry.get("version"): + del entry["version"] + ref = ref if ref is not None else entry.get("ref") + if ref: + entry["ref"] = ref + if self._should_include_hash(ireq): + collected_hashes = self.collect_hashes(ireq) + if collected_hashes: + entry["hashes"] = 
sorted(set(collected_hashes)) + return req.name, entry + + def clean_results(self): + from pipenv.vendor.requirementslib.models.requirements import Requirement + reqs = [(Requirement.from_ireq(ireq), ireq) for ireq in self.resolved_tree] + results = {} + for req, ireq in reqs: + if (req.vcs and req.editable and not req.is_direct_url): + continue + collected_hashes = self.collect_hashes(ireq) + req = req.add_hashes(collected_hashes) + if not collected_hashes and self._should_include_hash(ireq): + discovered_hashes = self.hashes.get(ireq, set()) | self.get_hash(ireq) + if discovered_hashes: + req = req.add_hashes(discovered_hashes) + self.hashes[ireq] = collected_hashes = discovered_hashes + if collected_hashes: + collected_hashes = sorted(set(collected_hashes)) + name, entry = format_requirement_for_lockfile( + req, self.markers_lookup, self.index_lookup, collected_hashes + ) + if name in results: + results[name].update(entry) + else: + results[name] = entry + for k in list(self.skipped.keys()): + req = Requirement.from_pipfile(k, self.skipped[k]) + name, entry = self._clean_skipped_result(req, self.skipped[k]) + if name in results: + results[name].update(entry) + else: + results[name] = entry + results = list(results.values()) + return results + + +def format_requirement_for_lockfile(req, markers_lookup, index_lookup, hashes=None): + if req.specifiers: + version = str(req.get_version()) + else: + version = None + index = index_lookup.get(req.normalized_name) + markers = markers_lookup.get(req.normalized_name) + req.index = index + name, pf_entry = req.pipfile_entry + name = pep423_name(req.name) + entry = {} + if isinstance(pf_entry, six.string_types): + entry["version"] = pf_entry.lstrip("=") + else: + entry.update(pf_entry) + if version is not None: + entry["version"] = version + if req.line_instance.is_direct_url: + entry["file"] = req.req.uri + if hashes: + entry["hashes"] = sorted(set(hashes)) + entry["name"] = name + if index: # and index != next(iter(project.sources), {}).get("name"): + entry.update({"index": index}) + if markers: + entry.update({"markers": markers}) + entry = translate_markers(entry) + return name, entry + def _show_warning(message, category, filename, lineno, line): warnings.showwarning(message=message, category=category, filename=filename, @@ -738,87 +898,93 @@ def actually_resolve_deps( req_dir=None, ): from pipenv.vendor.vistir.path import create_tracked_tempdir - from pipenv.vendor.requirementslib.models.requirements import Requirement if not req_dir: req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") warning_list = [] with warnings.catch_warnings(record=True) as warning_list: - constraints, skipped, index_lookup, markers_lookup = Resolver.get_metadata( - deps, index_lookup, markers_lookup, project, sources, + resolver = Resolver.create( + deps, index_lookup, markers_lookup, project, sources, req_dir, clear, pre ) - resolver = Resolver(constraints, req_dir, project, sources, clear=clear, pre=pre) - resolved_tree = resolver.resolve() + resolver.resolve() hashes = resolver.resolve_hashes() - reqs = [(Requirement.from_ireq(ireq), ireq) for ireq in resolved_tree] - results = {} - for req, ireq in reqs: - if (req.vcs and req.editable and not req.is_direct_url): - continue - collected_hashes = resolver.collect_hashes(ireq) - if collected_hashes: - req = req.add_hashes(collected_hashes) - elif resolver._should_include_hash(ireq): - existing_hashes = hashes.get(ireq, set()) - discovered_hashes = existing_hashes | resolver.get_hash(ireq) - if 
discovered_hashes: - req = req.add_hashes(discovered_hashes) - resolver.hashes[ireq] = discovered_hashes - if req.specifiers: - version = str(req.get_version()) - else: - version = None - index = index_lookup.get(req.normalized_name) - markers = markers_lookup.get(req.normalized_name) - req.index = index - name, pf_entry = req.pipfile_entry - name = pep423_name(req.name) - entry = {} - if isinstance(pf_entry, six.string_types): - entry["version"] = pf_entry.lstrip("=") - else: - entry.update(pf_entry) - if version is not None: - entry["version"] = version - if req.line_instance.is_direct_url: - entry["file"] = req.req.uri - if collected_hashes: - entry["hashes"] = sorted(set(collected_hashes)) - entry["name"] = name - if index: # and index != next(iter(project.sources), {}).get("name"): - entry.update({"index": index}) - if markers: - entry.update({"markers": markers}) - entry = translate_markers(entry) - if name in results: - results[name].update(entry) - else: - results[name] = entry - for k in list(skipped.keys()): - req = Requirement.from_pipfile(k, skipped[k]) - ref = None - if req.is_vcs: - ref = req.commit_hash - ireq = req.as_ireq() - entry = skipped[k].copy() - entry["name"] = req.name - ref = ref if ref is not None else entry.get("ref") - if ref: - entry["ref"] = ref - if resolver._should_include_hash(ireq): - collected_hashes = resolver.collect_hashes(ireq) - if collected_hashes: - entry["hashes"] = sorted(set(collected_hashes)) - if k in results: - results[k].update(entry) - else: - results[k] = entry - results = list(results.values()) + resolver.resolve_constraints() + results = resolver.clean_results() + # constraints, skipped, index_lookup, markers_lookup = Resolver.get_metadata( + # deps, index_lookup, markers_lookup, project, sources, + # ) + # resolver = Resolver(constraints, req_dir, project, sources, clear=clear, pre=pre) + # resolved_tree = resolver.resolve() + # hashes = resolver.resolve_hashes() + # reqs = [(Requirement.from_ireq(ireq), ireq) for ireq in resolved_tree] + # results = {} + # for req, ireq in reqs: + # if (req.vcs and req.editable and not req.is_direct_url): + # continue + # collected_hashes = resolver.collect_hashes(ireq) + # if collected_hashes: + # req = req.add_hashes(collected_hashes) + # elif resolver._should_include_hash(ireq): + # existing_hashes = hashes.get(ireq, set()) + # discovered_hashes = existing_hashes | resolver.get_hash(ireq) + # if discovered_hashes: + # req = req.add_hashes(discovered_hashes) + # resolver.hashes[ireq] = discovered_hashes + # if req.specifiers: + # version = str(req.get_version()) + # else: + # version = None + # index = index_lookup.get(req.normalized_name) + # markers = markers_lookup.get(req.normalized_name) + # req.index = index + # name, pf_entry = req.pipfile_entry + # name = pep423_name(req.name) + # entry = {} + # if isinstance(pf_entry, six.string_types): + # entry["version"] = pf_entry.lstrip("=") + # else: + # entry.update(pf_entry) + # if version is not None: + # entry["version"] = version + # if req.line_instance.is_direct_url: + # entry["file"] = req.req.uri + # if collected_hashes: + # entry["hashes"] = sorted(set(collected_hashes)) + # entry["name"] = name + # if index: # and index != next(iter(project.sources), {}).get("name"): + # entry.update({"index": index}) + # if markers: + # entry.update({"markers": markers}) + # entry = translate_markers(entry) + # if name in results: + # results[name].update(entry) + # else: + # results[name] = entry + # for k in list(skipped.keys()): + # req = 
Requirement.from_pipfile(k, skipped[k]) + # ref = None + # if req.is_vcs: + # ref = req.commit_hash + # ireq = req.as_ireq() + # entry = skipped[k].copy() + # entry["name"] = req.name + # ref = ref if ref is not None else entry.get("ref") + # if ref: + # entry["ref"] = ref + # if resolver._should_include_hash(ireq): + # collected_hashes = resolver.collect_hashes(ireq) + # if collected_hashes: + # entry["hashes"] = sorted(set(collected_hashes)) + # if k in results: + # results[k].update(entry) + # else: + # results[k] = entry + # results = list(results.values()) for warning in warning_list: _show_warning(warning.message, warning.category, warning.filename, warning.lineno, warning.line) - return (results, hashes, markers_lookup, resolver, skipped) + return (results, hashes, resolver.markers_lookup, resolver, resolver.skipped) @contextlib.contextmanager @@ -845,29 +1011,37 @@ def resolve(cmd, sp): EOF.__module__ = "pexpect.exceptions" from ._compat import decode_output c = delegator.run(Script.parse(cmd).cmdify(), block=False, env=os.environ.copy()) + if environments.is_verbose(): + c.subprocess.logfile = sys.stderr _out = decode_output("") result = None out = to_native_string("") while True: + result = None try: result = c.expect(u"\n", timeout=environments.PIPENV_INSTALL_TIMEOUT) except (EOF, TIMEOUT): pass _out = c.subprocess.before - if _out is not None: + if _out: _out = decode_output("{0}".format(_out)) out += _out sp.text = to_native_string("{0}".format(_out[:100])) if environments.is_verbose(): sp.hide_and_write(_out.rstrip()) - if result is None: + # if environments.is_verbose(): + # sp.hide_and_write(_out.rstrip()) + if not result and not _out: break + _out = to_native_string("") c.block() if c.return_code != 0: sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format( "Locking Failed!" 
)) click_echo(c.out.strip(), err=True) + if not environments.is_verbose(): + click_echo(out, err=True) click_echo(c.err.strip(), err=True) sys.exit(c.return_code) return c @@ -1038,9 +1212,6 @@ def venv_resolve_deps( raise RuntimeError("There was a problem with locking.") if os.path.exists(target_file.name): os.unlink(target_file.name) - if environments.is_verbose(): - click_echo(results, err=True) - if lockfile_section not in lockfile: lockfile[lockfile_section] = {} prepare_lockfile(results, pipfile, lockfile[lockfile_section]) @@ -1639,7 +1810,7 @@ def clean_resolved_dep(dep, is_top_level=False, pipfile_entry=None): lockfile = {} # We use this to determine if there are any markers on top level packages # So we can make sure those win out during resolution if the packages reoccur - if "version" in dep: + if "version" in dep and dep["version"] and not dep.get("editable", False): version = "{0}".format(dep["version"]) if not version.startswith("=="): version = "=={0}".format(version) @@ -1939,3 +2110,34 @@ def is_python_command(line): if line.startswith("py"): return True return False + + +def make_marker_from_specifier(spec): + # type: (str) -> Optional[Marker] + """Given a python version specifier, create a marker + + :param spec: A specifier + :type spec: str + :return: A new marker + :rtype: Optional[:class:`packaging.marker.Marker`] + """ + from .vendor.packaging.specifiers import SpecifierSet, Specifier + from .vendor.packaging.markers import Marker + from .vendor.requirementslib.models.markers import cleanup_pyspecs, format_pyversion + if not any(spec.startswith(k) for k in Specifier._operators.keys()): + if spec.strip().lower() in ["any", "", "*"]: + return None + spec = "=={0}".format(spec) + elif spec.startswith("==") and spec.count("=") > 3: + spec = "=={0}".format(spec.lstrip("=")) + specset = cleanup_pyspecs(SpecifierSet(spec)) + marker_str = " and ".join([format_pyversion(pv) for pv in specset]) + return Marker(marker_str) + # spec_match = next(iter(c for c in Specifier._operators if c in spec), None) + # if spec_match: + # spec_index = spec.index(spec_match) + # spec_end = spec_index + len(spec_match) + # op = spec[spec_index:spec_end].strip() + # version = spec[spec_end:].strip() + # spec = " {0} '{1}'".format(op, version) + # return Marker("python_version {0}".format(spec)) diff --git a/pipenv/vendor/pythonfinder/environment.py b/pipenv/vendor/pythonfinder/environment.py index eb00043800..6b22fb08bb 100644 --- a/pipenv/vendor/pythonfinder/environment.py +++ b/pipenv/vendor/pythonfinder/environment.py @@ -35,6 +35,11 @@ def is_type_checking(): IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking()) +SUBPROCESS_TIMEOUT = os.environ.get("PYTHONFINDER_SUBPROCESS_TIMEOUT", 5) +"""The default subprocess timeout for determining python versions + +Set to **5** by default. 
+""" def get_shim_paths(): diff --git a/pipenv/vendor/pythonfinder/models/mixins.py b/pipenv/vendor/pythonfinder/models/mixins.py index c1e7312ab0..b725f7f9f5 100644 --- a/pipenv/vendor/pythonfinder/models/mixins.py +++ b/pipenv/vendor/pythonfinder/models/mixins.py @@ -89,6 +89,9 @@ def __del__(self): self._children = {} for key in list(self._pythons.keys()): del self._pythons[key] + self._pythons = None + self._py_version = None + self.path = None @property def children(self): @@ -315,10 +318,8 @@ def get_versions(self): raise NotImplementedError @classmethod - def create( - cls, *args, **kwargs # type: Type[BaseFinderType] # type: Any # type: Any - ): - # type: (...) -> BaseFinderType + def create(cls, *args, **kwargs): + # type: (Any, Any) -> BaseFinderType raise NotImplementedError @property diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 9e099b593f..55f7cb13eb 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -22,6 +22,7 @@ PYENV_INSTALLED, PYENV_ROOT, SHIM_PATHS, + get_shim_paths, ) from ..exceptions import InvalidPythonVersion from ..utils import ( @@ -76,10 +77,9 @@ class SystemPath(object): path_order = attr.ib(default=attr.Factory(list)) # type: List[str] python_version_dict = attr.ib() # type: DefaultDict[Tuple, List[PythonVersion]] only_python = attr.ib(default=False, type=bool) - pyenv_finder = attr.ib( - default=None, validator=optional_instance_of("PythonFinder") - ) # type: Optional[PythonFinder] + pyenv_finder = attr.ib(default=None) # type: Optional[PythonFinder] asdf_finder = attr.ib(default=None) # type: Optional[PythonFinder] + windows_finder = attr.ib(default=None) # type: Optional[WindowsFinder] system = attr.ib(default=False, type=bool) _version_dict = attr.ib( default=attr.Factory(defaultdict) @@ -91,31 +91,63 @@ class SystemPath(object): ) # type: Dict[str, Union[WindowsFinder, PythonFinder]] def _register_finder(self, finder_name, finder): - # type: (str, Union[WindowsFinder, PythonFinder]) -> None + # type: (str, Union[WindowsFinder, PythonFinder]) -> "SystemPath" if finder_name not in self.__finders: self.__finders[finder_name] = finder + return self def clear_caches(self): for key in ["executables", "python_executables", "version_dict", "path_entries"]: if key in self.__dict__: del self.__dict__[key] - self._executables = [] - self._python_executables = {} - self.python_version_dict = defaultdict(list) - self._version_dict = defaultdict(list) + for finder in list(self.__finders.keys()): + del self.__finders[finder] + self.__finders = {} + return attr.evolve( + self, + executables=[], + python_executables={}, + python_version_dict=defaultdict(list), + version_dict=defaultdict(list), + pyenv_finder=None, + windows_finder=None, + asdf_finder=None, + path_order=[], + paths=defaultdict(PathEntry), + ) def __del__(self): - self.clear_caches() + for key in ["executables", "python_executables", "version_dict", "path_entries"]: + try: + del self.__dict__[key] + except KeyError: + pass + for finder in list(self.__finders.keys()): + del self.__finders[finder] + self.__finders = {} + self._python_executables = {} + self._executables = [] + self.python_version_dict = defaultdict(list) + self.version_dict = defaultdict(list) self.path_order = [] self.pyenv_finder = None self.asdf_finder = None self.paths = defaultdict(PathEntry) + self.__finders = {} @property def finders(self): # type: () -> List[str] return [k for k in self.__finders.keys()] + @staticmethod + 
def check_for_pyenv(): + return PYENV_INSTALLED or os.path.exists(normalize_path(PYENV_ROOT)) + + @staticmethod + def check_for_asdf(): + return ASDF_INSTALLED or os.path.exists(normalize_path(ASDF_DATA_DIR)) + @python_version_dict.default def create_python_version_dict(self): # type: () -> DefaultDict[Tuple, List[PythonVersion]] @@ -168,35 +200,68 @@ def version_dict(self): self._version_dict[version].append(entry) return self._version_dict - def __attrs_post_init__(self): - # type: () -> None - #: slice in pyenv + def _run_setup(self): + # type: () -> "SystemPath" if not self.__class__ == SystemPath: - return - if os.name == "nt": - self._setup_windows() - if PYENV_INSTALLED: - self._setup_pyenv() - if ASDF_INSTALLED: - self._setup_asdf() + return self + new_instance = self + path_order = new_instance.path_order[:] + path_entries = self.paths.copy() + if self.global_search and "PATH" in os.environ: + path_order = path_order + os.environ["PATH"].split(os.pathsep) + path_instances = [ + ensure_path(p.strip('"')) + for p in path_order + if not any( + is_in_path(normalize_path(str(p)), normalize_path(shim)) + for shim in SHIM_PATHS + ) + ] + path_entries.update( + { + p.as_posix(): PathEntry.create( + path=p.absolute(), is_root=True, only_python=self.only_python + ) + for p in path_instances + } + ) + new_instance = attr.evolve( + new_instance, + path_order=[p.as_posix() for p in path_instances], + paths=path_entries, + ) + if os.name == "nt" and "windows" not in self.finders: + new_instance = new_instance._setup_windows() + #: slice in pyenv + if self.check_for_pyenv() and "pyenv" not in self.finders: + new_instance = new_instance._setup_pyenv() + #: slice in asdf + if self.check_for_asdf() and "asdf" not in self.finders: + new_instance = new_instance._setup_asdf() venv = os.environ.get("VIRTUAL_ENV") if os.name == "nt": bin_dir = "Scripts" else: bin_dir = "bin" - if venv and (self.system or self.global_search): + if venv and (new_instance.system or new_instance.global_search): p = ensure_path(venv) - self.path_order = [(p / bin_dir).as_posix()] + self.path_order - self.paths[p] = self.get_path(p.joinpath(bin_dir)) - if self.system: + path_order = [(p / bin_dir).as_posix()] + new_instance.path_order + new_instance = attr.evolve(new_instance, path_order=path_order) + paths = new_instance.paths.copy() + paths[p] = new_instance.get_path(p.joinpath(bin_dir)) + new_instance = attr.evolve(new_instance, paths=paths) + if new_instance.system: syspath = Path(sys.executable) syspath_bin = syspath.parent if syspath_bin.name != bin_dir and syspath_bin.joinpath(bin_dir).exists(): syspath_bin = syspath_bin / bin_dir - self.path_order = [syspath_bin.as_posix()] + self.path_order - self.paths[syspath_bin] = PathEntry.create( + path_order = [syspath_bin.as_posix()] + new_instance.path_order + paths = new_instance.paths.copy() + paths[syspath_bin] = PathEntry.create( path=syspath_bin, is_root=True, only_python=False ) + new_instance = attr.evolve(new_instance, path_order=path_order, paths=paths) + return new_instance def _get_last_instance(self, path): # type: (str) -> int @@ -210,7 +275,7 @@ def _get_last_instance(self, path): return path_index def _slice_in_paths(self, start_idx, paths): - # type: (int, List[Path]) -> None + # type: (int, List[Path]) -> "SystemPath" before_path = [] # type: List[str] after_path = [] # type: List[str] if start_idx == 0: @@ -220,29 +285,35 @@ def _slice_in_paths(self, start_idx, paths): else: before_path = self.path_order[: start_idx + 1] after_path = 
self.path_order[start_idx + 2 :] - self.path_order = before_path + [p.as_posix() for p in paths] + after_path + path_order = before_path + [p.as_posix() for p in paths] + after_path + if path_order == self.path_order: + return self + return attr.evolve(self, path_order=path_order) def _remove_path(self, path): - # type: (str) -> None + # type: (str) -> "SystemPath" path_copy = [p for p in reversed(self.path_order[:])] new_order = [] target = normalize_path(path) path_map = {normalize_path(pth): pth for pth in self.paths.keys()} + new_paths = self.paths.copy() if target in path_map: - del self.paths[path_map[target]] + del new_paths[path_map[target]] for current_path in path_copy: normalized = normalize_path(current_path) if normalized != target: new_order.append(normalized) new_order = [p for p in reversed(new_order)] - self.path_order = new_order + return attr.evolve(self, path_order=new_order, paths=new_paths) def _setup_asdf(self): - # type: () -> None + # type: () -> "SystemPath" + if "asdf" in self.finders and self.asdf_finder is not None: + return self from .python import PythonFinder os_path = os.environ["PATH"].split(os.pathsep) - self.asdf_finder = PythonFinder.create( + asdf_finder = PythonFinder.create( root=ASDF_DATA_DIR, ignore_unsupported=True, sort_function=parse_asdf_version_order, @@ -252,20 +323,24 @@ def _setup_asdf(self): try: asdf_index = self._get_last_instance(ASDF_DATA_DIR) except ValueError: - pyenv_index = 0 if is_in_path(next(iter(os_path), ""), PYENV_ROOT) else -1 + asdf_index = 0 if is_in_path(next(iter(os_path), ""), ASDF_DATA_DIR) else -1 if asdf_index is None: # we are in a virtualenv without global pyenv on the path, so we should # not write pyenv to the path here - return - root_paths = [p for p in self.asdf_finder.roots] - self._slice_in_paths(asdf_index, [self.asdf_finder.root]) - self.paths[self.asdf_finder.root] = self.asdf_finder - self.paths.update(self.asdf_finder.roots) - self._remove_path(normalize_path(os.path.join(ASDF_DATA_DIR, "shims"))) - self._register_finder("asdf", self.asdf_finder) + return self + root_paths = [p for p in asdf_finder.roots] + new_instance = self._slice_in_paths(asdf_index, [asdf_finder.root]) + paths = self.paths.copy() + paths[asdf_finder.root] = asdf_finder + paths.update(asdf_finder.roots) + return ( + attr.evolve(new_instance, paths=paths, asdf_finder=asdf_finder) + ._remove_path(normalize_path(os.path.join(ASDF_DATA_DIR, "shims"))) + ._register_finder("asdf", asdf_finder) + ) def reload_finder(self, finder_name): - # type: (str) -> None + # type: (str) -> "SystemPath" if finder_name is None: raise TypeError("Must pass a string as the name of the target finder") finder_attr = "{0}_finder".format(finder_name) @@ -286,19 +361,21 @@ def reload_finder(self, finder_name): finder_name == "asdf" and not ASDF_INSTALLED ): # Don't allow loading of finders that aren't explicitly 'installed' as it were - pass + return self setattr(self, finder_attr, None) if finder_name in self.__finders: del self.__finders[finder_name] - setup_fn() + return setup_fn() def _setup_pyenv(self): - # type: () -> None + # type: () -> "SystemPath" + if "pyenv" in self.finders and self.pyenv_finder is not None: + return self from .python import PythonFinder os_path = os.environ["PATH"].split(os.pathsep) - self.pyenv_finder = PythonFinder.create( + pyenv_finder = PythonFinder.create( root=PYENV_ROOT, sort_function=parse_pyenv_version_order, version_glob_path="versions/*", @@ -312,25 +389,37 @@ def _setup_pyenv(self): if pyenv_index is None: # we are in 
a virtualenv without global pyenv on the path, so we should # not write pyenv to the path here - return - - root_paths = [p for p in self.pyenv_finder.roots] - self._slice_in_paths(pyenv_index, [self.pyenv_finder.root]) - self.paths[self.pyenv_finder.root] = self.pyenv_finder - self.paths.update(self.pyenv_finder.roots) - self._remove_path(os.path.join(PYENV_ROOT, "shims")) - self._register_finder("pyenv", self.pyenv_finder) + return self + + root_paths = [p for p in pyenv_finder.roots] + new_instance = self._slice_in_paths(pyenv_index, [pyenv_finder.root]) + paths = new_instance.paths.copy() + paths[pyenv_finder.root] = pyenv_finder + paths.update(pyenv_finder.roots) + return ( + attr.evolve(new_instance, paths=paths, pyenv_finder=pyenv_finder) + ._remove_path(os.path.join(PYENV_ROOT, "shims")) + ._register_finder("pyenv", pyenv_finder) + ) def _setup_windows(self): - # type: () -> None + # type: () -> "SystemPath" + if "windows" in self.finders and self.windows_finder is not None: + return self from .windows import WindowsFinder - self.windows_finder = WindowsFinder.create() - root_paths = (p for p in self.windows_finder.paths if p.is_root) + windows_finder = WindowsFinder.create() + root_paths = (p for p in windows_finder.paths if p.is_root) path_addition = [p.path.as_posix() for p in root_paths] - self.path_order = self.path_order[:] + path_addition - self.paths.update({p.path: p for p in root_paths}) - self._register_finder("windows", self.windows_finder) + new_path_order = self.path_order[:] + path_addition + new_paths = self.paths.copy() + new_paths.update({p.path: p for p in root_paths}) + return attr.evolve( + self, + windows_finder=windows_finder, + path_order=new_path_order, + paths=new_paths, + )._register_finder("windows", windows_finder) def get_path(self, path): # type: (Union[str, Path]) -> PathType @@ -350,7 +439,7 @@ def get_path(self, path): return _path def _get_paths(self): - # type: () -> Iterator + # type: () -> Generator[PathType, None, None] for path in self.path_order: try: entry = self.get_path(path) @@ -558,30 +647,44 @@ def create( paths = [] # type: List[str] if ignore_unsupported: os.environ["PYTHONFINDER_IGNORE_UNSUPPORTED"] = fs_str("1") - if global_search: - if "PATH" in os.environ: - paths = os.environ["PATH"].split(os.pathsep) + # if global_search: + # if "PATH" in os.environ: + # paths = os.environ["PATH"].split(os.pathsep) + path_order = [] if path: - paths = [path] + paths - paths = [p for p in paths if not any(is_in_path(p, shim) for shim in SHIM_PATHS)] - _path_objects = [ensure_path(p.strip('"')) for p in paths] - paths = [p.as_posix() for p in _path_objects] - path_entries.update( - { - p.as_posix(): PathEntry.create( - path=p.absolute(), is_root=True, only_python=only_python - ) - for p in _path_objects - } - ) - return cls( + path_order = [path] + path_instance = ensure_path(path) + path_entries.update( + { + path_instance.as_posix(): PathEntry.create( + path=path_instance.absolute(), + is_root=True, + only_python=only_python, + ) + } + ) + # paths = [path] + paths + # paths = [p for p in paths if not any(is_in_path(p, shim) for shim in SHIM_PATHS)] + # _path_objects = [ensure_path(p.strip('"')) for p in paths] + # paths = [p.as_posix() for p in _path_objects] + # path_entries.update( + # { + # p.as_posix(): PathEntry.create( + # path=p.absolute(), is_root=True, only_python=only_python + # ) + # for p in _path_objects + # } + # ) + instance = cls( paths=path_entries, - path_order=paths, + path_order=path_order, only_python=only_python, 
system=system, global_search=global_search, ignore_unsupported=ignore_unsupported, ) + instance = instance._run_setup() + return instance @attr.s(slots=True) @@ -603,8 +706,6 @@ def _filter_children(self): def _gen_children(self): # type: () -> Iterator - from ..environment import get_shim_paths - shim_paths = get_shim_paths() pass_name = self.name != self.path.name pass_args = {"is_root": False, "only_python": self.only_python} diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py index 25a12d6670..8e5eecd6e9 100644 --- a/pipenv/vendor/pythonfinder/models/python.py +++ b/pipenv/vendor/pythonfinder/models/python.py @@ -229,10 +229,8 @@ def get_pythons(self): return self.pythons @classmethod - def create( - cls, root, sort_function, version_glob_path=None, ignore_unsupported=True - ): # type: ignore - # type: (Type[PythonFinder], str, Callable, Optional[str], bool) -> PythonFinder + def create(cls, root, sort_function, version_glob_path=None, ignore_unsupported=True): + # type: (str, Callable, Optional[str], bool) -> PythonFinder root = ensure_path(root) if not version_glob_path: version_glob_path = "versions/*" @@ -593,6 +591,8 @@ def parse_executable(cls, path): raise TypeError("Must pass a valid path to parse.") if not isinstance(path, six.string_types): path = path.as_posix() + # if not looks_like_python(path): + # raise ValueError("Path %r does not look like a valid python path" % path) try: result_version = get_python_version(path) except Exception: diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index d5f38fb4cb..a68eab1e93 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function +import importlib import operator import os @@ -10,7 +11,6 @@ from . 
import environment
 from .exceptions import InvalidPythonVersion
-from .models import path as pyfinder_path
 from .utils import Iterable, filter_pythons, version_re

 if environment.MYPY_RUNNING:
@@ -68,6 +68,7 @@ def __eq__(self, other):

     def create_system_path(self):
         # type: () -> SystemPath
+        pyfinder_path = importlib.import_module("pythonfinder.models.path")
         return pyfinder_path.SystemPath.create(
             path=self.path_prepend,
             system=self.system,
@@ -84,8 +85,9 @@ def reload_system_path(self):
         """

         if self._system_path is not None:
-            self._system_path.clear_caches()
-            self._system_path = None
+            self._system_path = self._system_path.clear_caches()
+        self._system_path = None
+        pyfinder_path = importlib.import_module("pythonfinder.models.path")
         six.moves.reload_module(pyfinder_path)
         self._system_path = self.create_system_path()

@@ -95,8 +97,11 @@ def rehash(self):
         self._system_path = self.create_system_path()
         self.find_all_python_versions.cache_clear()
         self.find_python_version.cache_clear()
-        self.reload_system_path()
+        if self._windows_finder is not None:
+            self._windows_finder = None
         filter_pythons.cache_clear()
+        self.reload_system_path()
+        return self

     @property
     def system_path(self):
diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py
index a82654f388..bf8a2f4030 100644
--- a/pipenv/vendor/pythonfinder/utils.py
+++ b/pipenv/vendor/pythonfinder/utils.py
@@ -6,13 +6,14 @@
 import os
 import re
 from fnmatch import fnmatch
+from threading import Timer

 import attr
 import six
 import vistir
 from packaging.version import LegacyVersion, Version

-from .environment import MYPY_RUNNING, PYENV_ROOT
+from .environment import MYPY_RUNNING, PYENV_ROOT, SUBPROCESS_TIMEOUT
 from .exceptions import InvalidPythonVersion

 six.add_move(
@@ -37,11 +38,12 @@
     from .models.path import PathEntry


-version_re = re.compile(
+version_re_str = (
     r"(?P<major>\d+)(?:\.(?P<minor>\d+))?(?:\.(?P<patch>(?<=\.)[0-9]+))?\.?"
     r"(?:(?P<prerel>[abc]|rc|dev)(?:(?P<prerelversion>\d+(?:\.\d+)*))?)"
     r"?(?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?"
 )
+version_re = re.compile(version_re_str)
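A minimal sketch of how the rebuilt pattern is meant to be consumed, assuming the
named groups restored above (`major`, `minor`, `patch`, `prerel`, `prerelversion`,
`postdev`, `post`, `dev`):

    import re

    version_re = re.compile(
        r"(?P<major>\d+)(?:\.(?P<minor>\d+))?(?:\.(?P<patch>(?<=\.)[0-9]+))?\.?"
        r"(?:(?P<prerel>[abc]|rc|dev)(?:(?P<prerelversion>\d+(?:\.\d+)*))?)"
        r"?(?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?"
    )

    # "3.7.2rc1" splits into major=3, minor=7, patch=2, prerel=rc, prerelversion=1
    match = version_re.match("3.7.2rc1")
    assert match is not None
    assert match.group("major") == "3" and match.group("prerel") == "rc"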


 PYTHON_IMPLEMENTATIONS = (
@@ -53,13 +55,19 @@
     "miniconda",
     "stackless",
     "activepython",
+    "pyston",
     "micropython",
 )
-RE_MATCHER = re.compile(
-    r"(({0})(?:\d?(?:\.\d[cpm]{{0,3}}))?(?:-?[\d\.]+)*[^z])".format(
-        "|".join(PYTHON_IMPLEMENTATIONS)
-    )
+KNOWN_EXTS = {"exe", "py", "fish", "sh", ""}
+KNOWN_EXTS = KNOWN_EXTS | set(
+    filter(None, os.environ.get("PATHEXT", "").split(os.pathsep))
+)
+PY_MATCH_STR = r"((?P<implementation>{0})(?:\d?(?:\.\d[cpm]{{0,3}}))?(?:-?[\d\.]+)*[^z])".format(
+    "|".join(PYTHON_IMPLEMENTATIONS)
 )
+EXE_MATCH_STR = r"{0}(?:\.(?P<ext>{1}))?".format(PY_MATCH_STR, "|".join(KNOWN_EXTS))
+RE_MATCHER = re.compile(r"({0}|{1})".format(version_re_str, PY_MATCH_STR))
+EXE_MATCHER = re.compile(EXE_MATCH_STR)
 RULES_BASE = [
     "*{0}",
     "*{0}?",
@@ -71,11 +79,6 @@
 ]
 RULES = [rule.format(impl) for impl in PYTHON_IMPLEMENTATIONS for rule in RULES_BASE]

-KNOWN_EXTS = {"exe", "py", "fish", "sh", ""}
-KNOWN_EXTS = KNOWN_EXTS | set(
-    filter(None, os.environ.get("PATHEXT", "").split(os.pathsep))
-)
-
 MATCH_RULES = []
 for rule in RULES:
     MATCH_RULES.extend(
@@ -87,7 +90,11 @@ def get_python_version(path):
     # type: (str) -> str
     """Get python version string using subprocess from a given path."""
-    version_cmd = [path, "-c", "import sys; print(sys.version.split()[0])"]
+    version_cmd = [
+        path,
+        "-c",
+        "import sys; print('.'.join([str(i) for i in sys.version_info[:3]]))",
+    ]
     try:
         c = vistir.misc.run(
@@ -97,6 +104,7 @@ def get_python_version(path):
             combine_stderr=False,
             write_to_stdout=False,
         )
+        timer = Timer(5, c.kill)
     except OSError:
         raise InvalidPythonVersion("%s is not a valid python path" % path)
     if not c.out:
diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py
index c3e237acee..c3f4b84d55 100644
--- a/pipenv/vendor/requirementslib/__init__.py
+++ b/pipenv/vendor/requirementslib/__init__.py
@@ -10,7 +10,7 @@
 from .models.pipfile import Pipfile
 from .models.requirements import Requirement

-__version__ = "1.4.2"
+__version__ = "1.4.3.dev0"

 logger = logging.getLogger(__name__)

diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py
index 70fe3bc035..8bf8656ed2 100644
--- a/pipenv/vendor/requirementslib/models/markers.py
+++ b/pipenv/vendor/requirementslib/models/markers.py
@@ -1,19 +1,35 @@
 # -*- coding: utf-8 -*-
-import attr
+import itertools
+import operator

+import attr
+import distlib.markers
+import packaging.version
+import six
 from packaging.markers import InvalidMarker, Marker
+from packaging.specifiers import Specifier, SpecifierSet
+from vistir.compat import Mapping, Set, lru_cache
+from vistir.misc import _is_iterable, dedup

-from ..exceptions import RequirementError
 from .utils import filter_none, validate_markers
+from ..environment import MYPY_RUNNING
+from ..exceptions import RequirementError
+
+from six.moves import reduce  # isort:skip
+
+
+if MYPY_RUNNING:
+    from typing import Optional, List
+
+
+MAX_VERSIONS = {2: 7, 3: 10}


 @attr.s
 class PipenvMarkers(object):
     """System-level requirements - see PEP508 for more detail"""

-    os_name = attr.ib(
-        default=None, validator=attr.validators.optional(validate_markers)
-    )
+    os_name = attr.ib(default=None, validator=attr.validators.optional(validate_markers))
     sys_platform = attr.ib(
         default=None, validator=attr.validators.optional(validate_markers)
     )
@@ -92,3 +108,491 @@ def from_pipfile(cls, name, pipfile):
             pass
         else:
             return combined_marker
+
+
+@lru_cache(maxsize=128)
+def 
_tuplize_version(version): + return tuple(int(x) for x in filter(lambda i: i != "*", version.split("."))) + + +@lru_cache(maxsize=128) +def _format_version(version): + if not isinstance(version, six.string_types): + return ".".join(str(i) for i in version) + return version + + +# Prefer [x,y) ranges. +REPLACE_RANGES = {">": ">=", "<=": "<"} + + +@lru_cache(maxsize=128) +def _format_pyspec(specifier): + if isinstance(specifier, str): + if not any(op in specifier for op in Specifier._operators.keys()): + specifier = "=={0}".format(specifier) + specifier = Specifier(specifier) + version = specifier.version.replace(".*", "") + if ".*" in specifier.version: + specifier = Specifier("{0}{1}".format(specifier.operator, version)) + try: + op = REPLACE_RANGES[specifier.operator] + except KeyError: + return specifier + curr_tuple = _tuplize_version(version) + try: + next_tuple = (curr_tuple[0], curr_tuple[1] + 1) + except IndexError: + next_tuple = (curr_tuple[0], 1) + if not next_tuple[1] <= MAX_VERSIONS[next_tuple[0]]: + if specifier.operator == "<" and curr_tuple[1] <= MAX_VERSIONS[next_tuple[0]]: + op = "<=" + next_tuple = (next_tuple[0], curr_tuple[1]) + else: + return specifier + specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple))) + return specifier + + +@lru_cache(maxsize=128) +def _get_specs(specset): + if specset is None: + return + if isinstance(specset, Specifier) or not _is_iterable(specset): + new_specset = SpecifierSet() + specs = set() + specs.add(specset) + new_specset._specs = frozenset(specs) + specset = new_specset + if isinstance(specset, str): + specset = SpecifierSet(specset) + result = [] + for spec in set(specset): + version = spec.version + op = spec.operator + if op in ("in", "not in"): + versions = version.split(",") + op = "==" if op == "in" else "!=" + for ver in versions: + result.append((op, _tuplize_version(ver.strip()))) + else: + result.append((spec.operator, _tuplize_version(spec.version))) + return sorted(result, key=operator.itemgetter(1)) + + +@lru_cache(maxsize=128) +def _group_by_op(specs): + specs = [_get_specs(x) for x in list(specs)] + flattened = [(op, version) for spec in specs for op, version in spec] + specs = sorted(flattened) + grouping = itertools.groupby(specs, key=operator.itemgetter(0)) + return grouping + + +@lru_cache(maxsize=128) +def cleanup_pyspecs(specs, joiner="or"): + specs = {_format_pyspec(spec) for spec in specs} + # for != operator we want to group by version + # if all are consecutive, join as a list + results = set() + for op, versions in _group_by_op(tuple(specs)): + versions = [version[1] for version in versions] + versions = sorted(dedup(versions)) + # if we are doing an or operation, we need to use the min for >= + # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6 + # if we do an AND operation we need to use MAX to be more selective + if op in (">", ">="): + if joiner == "or": + results.add((op, _format_version(min(versions)))) + else: + results.add((op, _format_version(max(versions)))) + # we use inverse logic here so we will take the max value if we are + # using OR but the min value if we are using AND + elif op in ("<=", "<"): + if joiner == "or": + results.add((op, _format_version(max(versions)))) + else: + results.add((op, _format_version(min(versions)))) + # leave these the same no matter what operator we use + elif op in ("!=", "==", "~="): + version_list = sorted( + "{0}".format(_format_version(version)) for version in versions + ) + version = ", ".join(version_list) + if len(version_list) == 1: + 
results.add((op, version)) + elif op == "!=": + results.add(("not in", version)) + elif op == "==": + results.add(("in", version)) + else: + specifier = SpecifierSet( + ",".join(sorted("{0}{1}".format(op, v) for v in version_list)) + )._specs + for s in specifier: + results.add((s._spec[0], s._spec[1])) + else: + if len(version) == 1: + results.add((op, version)) + else: + specifier = SpecifierSet("{0}".format(version))._specs + for s in specifier: + results.add((s._spec[0], s._spec[1])) + return sorted(results, key=operator.itemgetter(1)) + + +def fix_version_tuple(version_tuple): + op, version = version_tuple + max_major = max(MAX_VERSIONS.keys()) + if version[0] > max_major: + return (op, (max_major, MAX_VERSIONS[max_major])) + max_allowed = MAX_VERSIONS[version[0]] + if op == "<" and version[1] > max_allowed and version[1] - 1 <= max_allowed: + op = "<=" + version = (version[0], version[1] - 1) + return (op, version) + + +@lru_cache(maxsize=128) +def get_versions(specset, group_by_operator=True): + specs = [_get_specs(x) for x in list(tuple(specset))] + initial_sort_key = lambda k: (k[0], k[1]) + initial_grouping_key = operator.itemgetter(0) + if not group_by_operator: + initial_grouping_key = operator.itemgetter(1) + initial_sort_key = operator.itemgetter(1) + version_tuples = sorted( + set((op, version) for spec in specs for op, version in spec), key=initial_sort_key + ) + version_tuples = [fix_version_tuple(t) for t in version_tuples] + op_groups = [ + (grp, list(map(operator.itemgetter(1), keys))) + for grp, keys in itertools.groupby(version_tuples, key=initial_grouping_key) + ] + versions = [ + (op, packaging.version.parse(".".join(str(v) for v in val))) + for op, vals in op_groups + for val in vals + ] + return sorted(versions, key=operator.itemgetter(1)) + + +def _ensure_marker(marker): + if not isinstance(marker, Marker): + return Marker(str(marker)) + return marker + + +def gen_marker(mkr): + m = Marker("python_version == '1'") + m._markers.pop() + m._markers.append(mkr) + return m + + +def _strip_extra(elements): + """Remove the "extra == ..." operands from the list.""" + + return _strip_marker_elem("extra", elements) + + +def _strip_pyversion(elements): + return _strip_marker_elem("python_version", elements) + + +def _strip_marker_elem(elem_name, elements): + """Remove the supplied element from the marker. + + This is not a comprehensive implementation, but relies on an important + characteristic of metadata generation: The element's operand is always + associated with an "and" operator. This means that we can simply remove the + operand and the "and" operator associated with it. + """ + + extra_indexes = [] + preceding_operators = ["and"] if elem_name == "extra" else ["and", "or"] + for i, element in enumerate(elements): + if isinstance(element, list): + cancelled = _strip_marker_elem(elem_name, element) + if cancelled: + extra_indexes.append(i) + elif isinstance(element, tuple) and element[0].value == elem_name: + extra_indexes.append(i) + for i in reversed(extra_indexes): + del elements[i] + if i > 0 and elements[i - 1] in preceding_operators: + # Remove the "and" before it. + del elements[i - 1] + elif elements: + # This shouldn't ever happen, but is included for completeness. + # If there is not an "and" before this element, try to remove the + # operator after it. 
+            del elements[0]
+    return not elements
+
+
+def _get_stripped_marker(marker, strip_func):
+    """Build a new marker which is cleaned according to `strip_func`"""
+
+    if not marker:
+        return None
+    marker = _ensure_marker(marker)
+    elements = marker._markers
+    strip_func(elements)
+    if elements:
+        return marker
+    return None
+
+
+def get_without_extra(marker):
+    """Build a new marker without the `extra == ...` part.
+
+    The implementation reaches very deep into packaging's internals, but I don't
+    have a better way now (except implementing the whole thing myself).
+
+    This could return `None` if the `extra == ...` part is the only one in the
+    input marker.
+    """
+
+    return _get_stripped_marker(marker, _strip_extra)
+
+
+def get_without_pyversion(marker):
+    """Build a new marker without the `python_version` part.
+
+    This could return `None` if the `python_version` section is the only section in the
+    marker.
+    """
+
+    return _get_stripped_marker(marker, _strip_pyversion)
+
+
+def _markers_collect_extras(markers, collection):
+    # Optimization: the marker element is usually appended at the end.
+    for el in reversed(markers):
+        if isinstance(el, tuple) and el[0].value == "extra" and el[1].value == "==":
+            collection.add(el[2].value)
+        elif isinstance(el, list):
+            _markers_collect_extras(el, collection)
+
+
+def _markers_collect_pyversions(markers, collection):
+    local_collection = []
+    marker_format_str = "{0}"
+    for i, el in enumerate(reversed(markers)):
+        if isinstance(el, tuple) and el[0].value == "python_version":
+            new_marker = str(gen_marker(el))
+            local_collection.append(marker_format_str.format(new_marker))
+        elif isinstance(el, list):
+            _markers_collect_pyversions(el, local_collection)
+    if local_collection:
+        # local_collection = "{0}".format(" ".join(local_collection))
+        collection.extend(local_collection)
+
+
+def _markers_contains_extra(markers):
+    # Optimization: the marker element is usually appended at the end.
+    return _markers_contains_key(markers, "extra")
+
+
+def _markers_contains_pyversion(markers):
+    return _markers_contains_key(markers, "python_version")
+
+
+def _markers_contains_key(markers, key):
+    for element in reversed(markers):
+        if isinstance(element, tuple) and element[0].value == key:
+            return True
+        elif isinstance(element, list):
+            if _markers_contains_key(element, key):
+                return True
+    return False
+
+
+@lru_cache(maxsize=128)
+def get_contained_extras(marker):
+    """Collect "extra == ..." operands from a marker.
+
+    Returns a set of str. Each str is a specified extra in this marker.
+    """
+    if not marker:
+        return set()
+    extras = set()
+    marker = _ensure_marker(marker)
+    _markers_collect_extras(marker._markers, extras)
+    return extras
+
+
+def get_contained_pyversions(marker):
+    """Collect all `python_version` operands from a marker.
+    """
+
+    collection = []
+    if not marker:
+        return set()
+    marker = _ensure_marker(marker)
+    # Collect the (Variable, Op, Value) tuples and string joiners from the marker
+    _markers_collect_pyversions(marker._markers, collection)
+    marker_str = " and ".join(sorted(collection))
+    if not marker_str:
+        return set()
+    # Use the distlib dictionary parser to create a dictionary 'trie' which is a bit
+    # easier to reason about
+    marker_dict = distlib.markers.parse_marker(marker_str)[0]
+    version_set = set()
+    pyversions, _ = parse_marker_dict(marker_dict)
+    if isinstance(pyversions, set):
+        version_set.update(pyversions)
+    elif pyversions is not None:
+        version_set.add(pyversions)
+    # Each distinct element in the set was separated by an "and" operator in the marker
+    # So we will need to reduce them with an intersection here rather than a union
+    # in order to find the boundaries
+    versions = set()
+    if version_set:
+        versions = reduce(lambda x, y: x & y, version_set)
+    return versions
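A short usage sketch of the stripping and collection helpers above (the import
path assumes this module's vendored location; expected results shown as comments):

    from packaging.markers import Marker
    from pipenv.vendor.requirementslib.models.markers import (
        get_contained_extras,
        get_without_extra,
    )

    marker = Marker('python_version >= "2.7" and extra == "tests"')
    print(get_contained_extras(marker))  # {'tests'}
    print(get_without_extra(marker))     # python_version >= "2.7"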
+ """ + + collection = [] + if not marker: + return set() + marker = _ensure_marker(marker) + # Collect the (Variable, Op, Value) tuples and string joiners from the marker + _markers_collect_pyversions(marker._markers, collection) + marker_str = " and ".join(sorted(collection)) + if not marker_str: + return set() + # Use the distlib dictionary parser to create a dictionary 'trie' which is a bit + # easier to reason about + marker_dict = distlib.markers.parse_marker(marker_str)[0] + version_set = set() + pyversions, _ = parse_marker_dict(marker_dict) + if isinstance(pyversions, set): + version_set.update(pyversions) + elif pyversions is not None: + version_set.add(pyversions) + # Each distinct element in the set was separated by an "and" operator in the marker + # So we will need to reduce them with an intersection here rather than a union + # in order to find the boundaries + versions = set() + if version_set: + versions = reduce(lambda x, y: x & y, version_set) + return versions + + +@lru_cache(maxsize=128) +def contains_extra(marker): + """Check whehter a marker contains an "extra == ..." operand. + """ + if not marker: + return False + marker = _ensure_marker(marker) + return _markers_contains_extra(marker._markers) + + +@lru_cache(maxsize=128) +def contains_pyversion(marker): + """Check whether a marker contains a python_version operand. + """ + + if not marker: + return False + marker = _ensure_marker(marker) + return _markers_contains_pyversion(marker._markers) + + +def get_specset(marker_list): + # type: (List) -> Optional[SpecifierSet] + specset = set() + _last_str = "and" + for marker_parts in marker_list: + if isinstance(marker_parts, tuple): + variable, op, value = marker_parts + if variable.value != "python_version": + continue + if op.value == "in": + values = [v.strip() for v in value.value.split(",")] + specset.update(Specifier("=={0}".format(v)) for v in values) + elif op.value == "not in": + values = [v.strip() for v in value.value.split(",")] + bad_versions = ["3.0", "3.1", "3.2", "3.3"] + if len(values) >= 2 and any(v in values for v in bad_versions): + values = bad_versions + specset.update( + Specifier("!={0}".format(v.strip())) for v in sorted(bad_versions) + ) + else: + specset.add(Specifier("{0}{1}".format(op.value, value.value))) + elif isinstance(marker_parts, list): + specset.update(get_specset(marker_parts)) + elif isinstance(marker_parts, str): + _last_str = marker_parts + specifiers = SpecifierSet() + specifiers._specs = frozenset(specset) + return specifiers + + +def parse_marker_dict(marker_dict): + op = marker_dict["op"] + lhs = marker_dict["lhs"] + rhs = marker_dict["rhs"] + # This is where the spec sets for each side land if we have an "or" operator + side_spec_list = [] + side_markers_list = [] + finalized_marker = "" + # And if we hit the end of the parse tree we use this format string to make a marker + format_string = "{lhs} {op} {rhs}" + specset = SpecifierSet() + specs = set() + # Essentially we will iterate over each side of the parsed marker if either one is + # A mapping instance (i.e. 
a dictionary) and recursively parse and reduce the specset + # Union the "and" specs, intersect the "or"s to find the most appropriate range + if any(issubclass(type(side), Mapping) for side in (lhs, rhs)): + for side in (lhs, rhs): + side_specs = set() + side_markers = set() + if issubclass(type(side), Mapping): + merged_side_specs, merged_side_markers = parse_marker_dict(side) + side_specs.update(merged_side_specs) + side_markers.update(merged_side_markers) + else: + marker = _ensure_marker(side) + marker_parts = getattr(marker, "_markers", []) + if marker_parts[0][0].value == "python_version": + side_specs |= set(get_specset(marker_parts)) + else: + side_markers.add(str(marker)) + side_spec_list.append(side_specs) + side_markers_list.append(side_markers) + if op == "and": + # When we are "and"-ing things together, it probably makes the most sense + # to reduce them here into a single PySpec instance + specs = reduce(lambda x, y: set(x) | set(y), side_spec_list) + markers = reduce(lambda x, y: set(x) | set(y), side_markers_list) + if not specs and not markers: + return specset, finalized_marker + if markers and isinstance(markers, (tuple, list, Set)): + finalized_marker = Marker(" and ".join([m for m in markers if m])) + elif markers: + finalized_marker = str(markers) + specset._specs = frozenset(specs) + return specset, finalized_marker + # Actually when we "or" things as well we can also just turn them into a reduced + # set using this logic now + sides = reduce(lambda x, y: set(x) & set(y), side_spec_list) + finalized_marker = " or ".join( + [normalize_marker_str(m) for m in side_markers_list] + ) + specset._specs = frozenset(sorted(sides)) + return specset, finalized_marker + else: + # At the tip of the tree we are dealing with strings all around and they just need + # to be smashed together + specs = set() + if lhs == "python_version": + format_string = "{lhs}{op}{rhs}" + marker = Marker(format_string.format(**marker_dict)) + marker_parts = getattr(marker, "_markers", []) + _set = get_specset(marker_parts) + if _set: + specs |= set(_set) + specset._specs = frozenset(specs) + return specset, finalized_marker + + +def format_pyversion(parts): + op, val = parts + return "python_version {0} '{1}'".format(op, val) + + +def normalize_marker_str(marker): + marker_str = "" + if not marker: + return None + if not isinstance(marker, Marker): + marker = _ensure_marker(marker) + pyversion = get_contained_pyversions(marker) + marker = get_without_pyversion(marker) + if pyversion: + parts = cleanup_pyspecs(pyversion) + marker_str = " and ".join([format_pyversion(pv) for pv in parts]) + if marker: + if marker_str: + marker_str = "{0!s} and {1!s}".format(marker_str, marker) + else: + marker_str = "{0!s}".format(marker) + return marker_str.replace('"', "'") diff --git a/pipenv/vendor/requirementslib/models/pipfile.py b/pipenv/vendor/requirementslib/models/pipfile.py index 22eee048dd..3f7b20c2e6 100644 --- a/pipenv/vendor/requirementslib/models/pipfile.py +++ b/pipenv/vendor/requirementslib/models/pipfile.py @@ -7,22 +7,21 @@ import sys import attr -import tomlkit - import plette.models.base import plette.pipfiles - +import tomlkit from vistir.compat import FileNotFoundError, Path -from ..exceptions import RequirementError -from ..utils import is_editable, is_vcs, merge_items from .project import ProjectFile from .requirements import Requirement -from .utils import optional_instance_of, get_url_name - +from .utils import get_url_name, optional_instance_of, tomlkit_value_to_python from ..environment 
import MYPY_RUNNING +from ..exceptions import RequirementError +from ..utils import is_editable, is_vcs, merge_items + if MYPY_RUNNING: from typing import Union, Any, Dict, Iterable, Mapping, List, Text + package_type = Dict[Text, Dict[Text, Union[List[Text], Text]]] source_type = Dict[Text, Union[Text, bool]] sources_type = Iterable[source_type] @@ -46,7 +45,7 @@ def patch_plette(): def validate(cls, data): # type: (Any, Dict[Text, Any]) -> None - if not cerberus: # Skip validation if Cerberus is not available. + if not cerberus: # Skip validation if Cerberus is not available. return schema = cls.__SCHEMA__ key = id(schema) @@ -156,10 +155,12 @@ class Pipfile(object): path = attr.ib(validator=is_path, type=Path) projectfile = attr.ib(validator=is_projectfile, type=ProjectFile) _pipfile = attr.ib(type=PipfileLoader) - _pyproject = attr.ib(default=attr.Factory(tomlkit.document), type=tomlkit.toml_document.TOMLDocument) + _pyproject = attr.ib( + default=attr.Factory(tomlkit.document), type=tomlkit.toml_document.TOMLDocument + ) build_system = attr.ib(default=attr.Factory(dict), type=dict) - requirements = attr.ib(default=attr.Factory(list), type=list) - dev_requirements = attr.ib(default=attr.Factory(list), type=list) + _requirements = attr.ib(default=attr.Factory(list), type=list) + _dev_requirements = attr.ib(default=attr.Factory(list), type=list) @path.default def _get_path(self): @@ -188,7 +189,9 @@ def get_deps(self, dev=False, only=True): deps.update(self.pipfile._data["dev-packages"]) if only: return deps - return merge_items([deps, self.pipfile._data["packages"]]) + return tomlkit_value_to_python( + merge_items([deps, self.pipfile._data["packages"]]) + ) def get(self, k): # type: (Text) -> Any @@ -213,6 +216,7 @@ def __getitem__(self, k, *args, **kwargs): if "-" in k: section, _, pkg_type = k.rpartition("-") vals = getattr(pipfile.get(section, {}), "_data", {}) + vals = tomlkit_value_to_python(vals) if pkg_type == "vcs": retval = {k: v for k, v in vals.items() if is_vcs(v)} elif pkg_type == "editable": @@ -254,11 +258,7 @@ def read_projectfile(cls, path): :return: A project file with the model and location for interaction :rtype: :class:`~requirementslib.models.project.ProjectFile` """ - pf = ProjectFile.read( - path, - PipfileLoader, - invalid_ok=True - ) + pf = ProjectFile.read(path, PipfileLoader, invalid_ok=True) return pf @classmethod @@ -303,18 +303,10 @@ def load(cls, path, create=False): projectfile = cls.load_projectfile(path, create=create) pipfile = projectfile.model - dev_requirements = [ - Requirement.from_pipfile(k, getattr(v, "_data", v)) for k, v in pipfile.get("dev-packages", {}).items() - ] - requirements = [ - Requirement.from_pipfile(k, getattr(v, "_data", v)) for k, v in pipfile.get("packages", {}).items() - ] creation_args = { "projectfile": projectfile, "pipfile": pipfile, - "dev_requirements": dev_requirements, - "requirements": requirements, - "path": Path(projectfile.location) + "path": Path(projectfile.location), } return cls(**creation_args) @@ -333,6 +325,30 @@ def packages(self): # type: () -> List[Requirement] return self.requirements + @property + def dev_requirements(self): + # type: () -> List[Requirement] + if not self._dev_requirements: + packages = tomlkit_value_to_python(self.pipfile.get("dev-packages", {})) + self._dev_requirements = [ + Requirement.from_pipfile(k, v) + for k, v in packages.items() + if v is not None + ] + return self._dev_requirements + + @property + def requirements(self): + # type: () -> List[Requirement] + if not 
self._requirements:
+            packages = tomlkit_value_to_python(self.pipfile.get("packages", {}))
+            self._requirements = [
+                Requirement.from_pipfile(k, v)
+                for k, v in packages.items()
+                if v is not None
+            ]
+        return self._requirements
+
     def _read_pyproject(self):
         # type: () -> None
         pyproject = self.path.parent.joinpath("pyproject.toml")
diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py
index 5fb219acf7..30dbec4646 100644
--- a/pipenv/vendor/requirementslib/models/requirements.py
+++ b/pipenv/vendor/requirementslib/models/requirements.py
@@ -40,7 +40,20 @@
     normalize_path,
 )

-from .setup_info import SetupInfo, _prepare_wheel_building_kwargs
+from .markers import (
+    cleanup_pyspecs,
+    contains_pyversion,
+    format_pyversion,
+    get_contained_pyversions,
+    normalize_marker_str,
+)
+from .setup_info import (
+    SetupInfo,
+    _prepare_wheel_building_kwargs,
+    ast_parse_setup_py,
+    get_metadata,
+    parse_setup_cfg,
+)
 from .url import URI
 from .utils import (
     DIRECT_URL_RE,
@@ -74,6 +87,7 @@
     VCS_LIST,
     add_ssh_scheme_to_git_uri,
     get_setup_paths,
+    is_installable_dir,
     is_installable_file,
     is_vcs,
     strip_ssh_from_git_uri,
@@ -195,6 +209,18 @@ def __repr__(self):
         except Exception:
             return "<Line {0}>".format(self.__dict__.values())

+    @property
+    def name_and_specifier(self):
+        name_str, spec_str = "", ""
+        if self.name:
+            name_str = "{0}".format(self.name.lower())
+            extras_str = extras_to_string(self.extras)
+            if extras_str:
+                name_str = "{0}{1}".format(name_str, extras_str)
+        if self.specifier:
+            spec_str = "{0}".format(self.specifier)
+        return "{0}{1}".format(name_str, spec_str)
+
     @classmethod
     def split_hashes(cls, line):
         # type: (S) -> Tuple[S, List[S]]
@@ -216,6 +242,8 @@ def split_hashes(cls, line):
     def line_with_prefix(self):
         # type: () -> STRING_TYPE
         line = self.line
+        if self.is_named:
+            return self.name_and_specifier
         extras_str = extras_to_string(self.extras)
         if self.is_direct_url:
             line = self.link.url
@@ -224,7 +252,7 @@ def line_with_prefix(self):
             line = self.link.url
             if "git+file:/" in line and "git+file:///" not in line:
                 line = line.replace("git+file:/", "git+file:///")
-        else:
+        elif extras_str not in line:
             line = "{0}{1}".format(line, extras_str)
         if self.editable:
             return "-e {0}".format(line)
@@ -487,10 +515,10 @@ def parse_hashes(self):
         :returns: Nothing
         :rtype: None
         """
-
         line, hashes = self.split_hashes(self.line)
         self.hashes = hashes
         self.line = line
+        return self

     def parse_extras(self):
         # type: () -> None
@@ -499,7 +527,6 @@ def parse_extras(self):
         :returns: Nothing
         :rtype: None
         """
-
         extras = None
         if "@" in self.line or self.is_vcs or self.is_url:
             line = "{0}".format(self.line)
@@ -525,11 +552,11 @@ def parse_extras(self):
                 extras_set |= name_extras
         if extras_set is not None:
             self.extras = tuple(sorted(extras_set))
+        return self

     def get_url(self):
         # type: () -> STRING_TYPE
         """Sets ``self.name`` if given a **PEP-508** style URL"""
-
         line = self.line
         try:
             parsed = URI.parse(line)
@@ -569,6 +596,10 @@ def name(self):
         if self._name is None and not self.is_named and not self.is_wheel:
             if self.setup_info:
                 self._name = self.setup_info.name
+        elif self.is_wheel:
+            self._name = self._parse_wheel()
+        if not self._name:
+            self._name = self.ireq.name
         return self._name

     @name.setter
@@ -781,6 +812,29 @@ def vcsrepo(self):
             self._vcsrepo = self._get_vcsrepo()
         return self._vcsrepo

+    @cached_property
+    def metadata(self):
+        # type: () -> Dict[Any, Any]
+        if self.is_local and is_installable_dir(self.path):
+            return get_metadata(self.path)
+        return {}
+
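With the `return self` endings added above, the individual parse steps become
chainable, and `name_and_specifier` gives named requirements a canonical form.
A rough sketch (vendored import path assumed, output approximate):

    from pipenv.vendor.requirementslib.models.requirements import Line

    # the name is lower-cased; extras and the specifier are re-attached in order
    line = Line("Requests[security]>=2.19.1")
    print(line.name_and_specifier)  # roughly: requests[security]>=2.19.1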
+    @cached_property
+    def parsed_setup_cfg(self):
+        # type: () -> Dict[Any, Any]
+        if self.is_local and is_installable_dir(self.path):
+            if self.setup_cfg:
+                return parse_setup_cfg(self.setup_cfg)
+        return {}
+
+    @cached_property
+    def parsed_setup_py(self):
+        # type: () -> Dict[Any, Any]
+        if self.is_local and is_installable_dir(self.path):
+            if self.setup_py:
+                return ast_parse_setup_py(self.setup_py)
+        return {}
+
     @vcsrepo.setter
     def vcsrepo(self, repo):
         # type: (VCSRepository) -> None
@@ -843,7 +897,6 @@ def _parse_wheel(self):

     def _parse_name_from_link(self):
         # type: () -> Optional[STRING_TYPE]
-
         if self.link is None:
             return None
         if getattr(self.link, "egg_fragment", None):
@@ -881,8 +934,29 @@ def _parse_name_from_line(self):
             self._specifier = "{0}{1}".format(specifier, version)
         return name

+    def _parse_name_from_path(self):
+        # type: () -> Optional[S]
+        if self.path and self.is_local and is_installable_dir(self.path):
+            metadata = get_metadata(self.path)
+            if metadata:
+                name = metadata.get("name", "")
+                if name:
+                    return name
+            parsed_setup_cfg = self.parsed_setup_cfg
+            if parsed_setup_cfg:
+                name = parsed_setup_cfg.get("name", "")
+                if name:
+                    return name
+
+            parsed_setup_py = self.parsed_setup_py
+            if parsed_setup_py:
+                name = parsed_setup_py.get("name", "")
+                if name:
+                    return name
+        return None
+
     def parse_name(self):
-        # type: () -> None
+        # type: () -> "Line"
         if self._name is None:
             name = None
             if self.link is not None:
@@ -895,13 +969,17 @@ def parse_name(self):
                 if "&" in name:
                     # subdirectory fragments might also be in here
                     name, _, _ = name.partition("&")
-            if self.is_named:
+            if name is None and self.is_named:
                 name = self._parse_name_from_line()
+            elif name is None and (self.is_file or self.is_url or self.is_path):
+                if self.is_local:
+                    name = self._parse_name_from_path()
             if name is not None:
                 name, extras = pip_shims.shims._strip_extras(name)
                 if extras is not None and not self.extras:
                     self.extras = tuple(sorted(set(parse_extras(extras))))
             self._name = name
+        return self

     def _parse_requirement_from_vcs(self):
         # type: () -> Optional[PackagingRequirement]
@@ -939,7 +1017,7 @@ def _parse_requirement_from_vcs(self):
         return self._requirement

     def parse_requirement(self):
-        # type: () -> None
+        # type: () -> "Line"
         if self._name is None:
             self.parse_name()
             if not self._name and not self.is_vcs and not self.is_named:
@@ -982,9 +1060,10 @@ def parse_requirement(self):
                     "dependencies. Please install remote dependency "
                     "in the form {0}#egg=<package-name>.".format(url)
                 )
+        return self

     def parse_link(self):
-        # type: () -> None
+        # type: () -> "Line"
         parsed_url = None  # type: Optional[URI]
         if not is_valid_url(self.line) and (
             self.line.startswith("./")
@@ -1033,6 +1112,7 @@ def parse_link(self):
             self._link = parsed_link
         else:
             self._link = link
+        return self

     def parse_markers(self):
         # type: () -> None
@@ -1110,8 +1190,7 @@ def line_is_installable(self):

     def parse(self):
         # type: () -> None
-        self.parse_hashes()
-        self.line, self.markers = split_markers_from_line(self.line)
+        self.line, self.markers = split_markers_from_line(self.parse_hashes().line)
         self.parse_extras()
         self.line = self.line.strip('"').strip("'").strip()
         if self.line.startswith("git+file:/") and not self.line.startswith(
@@ -1184,8 +1263,8 @@ def from_line(cls, line, parsed_line=None):
         return cls(**creation_kwargs)

     @classmethod
-    def from_pipfile(cls, name, pipfile):  # type: S  # type: TPIPFILE
-        # type: (...) 
-> NamedRequirement + def from_pipfile(cls, name, pipfile): + # type: (S, TPIPFILE) -> NamedRequirement creation_args = {} # type: TPIPFILE if hasattr(pipfile, "keys"): attr_fields = [field.name for field in attr.fields(cls)] @@ -1471,84 +1550,12 @@ def get_uri(self): @name.default def get_name(self): # type: () -> STRING_TYPE - loc = self.path or self.uri - if loc and not self._uri_scheme: - self._uri_scheme = "path" if self.path else "file" - name = None # type: Optional[STRING_TYPE] - hashed_loc = None # type: Optional[STRING_TYPE] - hashed_name = None # type: Optional[STRING_TYPE] - if loc: - hashed_loc = hashlib.sha256(loc.encode("utf-8")).hexdigest() - hashed_name = hashed_loc[-7:] - if ( - getattr(self, "req", None) - and self.req is not None - and getattr(self.req, "name") - and self.req.name is not None - ): - if self.is_direct_url and self.req.name != hashed_name: - return self.req.name - if self.link and self.link.egg_fragment and self.link.egg_fragment != hashed_name: + if self.parsed_line and self.parsed_line.name: + return self.parsed_line.name + elif self.link and self.link.egg_fragment: return self.link.egg_fragment - elif self.link and self.link.is_wheel: - from pip_shims import Wheel - - self._has_hashed_name = False - return Wheel(self.link.filename).name - elif self.link and ( - (self.link.scheme == "file" or self.editable) - or (self.path and self.setup_path and os.path.isfile(str(self.setup_path))) - ): - _ireq = None # type: Optional[InstallRequirement] - target_path = "" # type: STRING_TYPE - if self.setup_py_dir: - target_path = Path(self.setup_py_dir).as_posix() - elif self.path: - target_path = Path(os.path.abspath(self.path)).as_posix() - if self.editable: - line = pip_shims.shims.path_to_url(target_path) - if self.extras: - line = "{0}[{1}]".format(line, ",".join(self.extras)) - _ireq = pip_shims.shims.install_req_from_editable(line) - else: - line = target_path - if self.extras: - line = "{0}[{1}]".format(line, ",".join(self.extras)) - _ireq = pip_shims.shims.install_req_from_line(line) - if getattr(self, "req", None) is not None: - _ireq.req = copy.deepcopy(self.req) - if self.extras and _ireq and not _ireq.extras: - _ireq.extras = set(self.extras) - from .setup_info import SetupInfo - - subdir = getattr(self, "subdirectory", None) - if self.setup_info is not None: - setupinfo = self.setup_info - else: - setupinfo = SetupInfo.from_ireq(_ireq, subdir=subdir) - if setupinfo: - self._setup_info = setupinfo - self._setup_info.get_info() - setupinfo_dict = setupinfo.as_dict() - setup_name = setupinfo_dict.get("name", None) - if setup_name: - name = setup_name - self._has_hashed_name = False - build_requires = setupinfo_dict.get("build_requires") - build_backend = setupinfo_dict.get("build_backend") - if build_requires and not self.pyproject_requires: - self.pyproject_requires = tuple(build_requires) - if build_backend and not self.pyproject_backend: - self.pyproject_backend = build_backend - if not name or name.lower() == "unknown": - self._has_hashed_name = True - name = hashed_name - name_in_link = getattr(self.link, "egg_fragment", "") if self.link else "" - if not self._has_hashed_name and name_in_link != name and self.link is not None: - self.link = create_link("{0}#egg={1}".format(self.link.url, name)) - if name is not None: - return name - return "" + elif self.setup_info and self.setup_info.name: + return self.setup_info.name @link.default def get_link(self): @@ -1581,34 +1588,6 @@ def get_requirement(self): if req: return req - req = 
init_requirement(normalize_name(self.name))
-        if req is None:
-            raise ValueError(
-                "Failed to generate a requirement: missing name for {0!r}".format(self)
-            )
-        req.editable = False
-        if self.link is not None:
-            req.line = self.link.url_without_fragment
-        elif self.uri is not None:
-            req.line = self.uri
-        else:
-            req.line = self.name
-        if self.path and self.link and self.link.scheme.startswith("file"):
-            req.local_file = True
-            req.path = self.path
-            if self.editable:
-                req.url = None
-            else:
-                req.url = self.link.url_without_fragment
-        else:
-            req.local_file = False
-            req.path = None
-            req.url = self.link.url_without_fragment
-        if self.editable:
-            req.editable = True
-        req.link = self.link
-        return req
-
     @property
     def parsed_line(self):
         # type: () -> Optional[Line]
@@ -1639,11 +1618,9 @@ def is_remote_artifact(self):
         if self.link is None:
             return False
         return (
-            any(
-                self.link.scheme.startswith(scheme)
-                for scheme in ("http", "https", "ftp", "ftps", "uri")
-            )
-            and (self.link.is_artifact or self.link.is_wheel)
+            self._parsed_line
+            and not self._parsed_line.is_local
+            and (self._parsed_line.is_artifact or self._parsed_line.is_wheel)
             and not self.editable
         )

@@ -1833,53 +1810,8 @@ def create(
     @classmethod
     def from_line(cls, line, editable=None, extras=None, parsed_line=None):
         # type: (AnyStr, Optional[bool], Optional[Tuple[AnyStr, ...]], Optional[Line]) -> F
-        line = line.strip('"').strip("'")
-        link = None
-        path = None
-        editable = line.startswith("-e ")
-        line = line.split(" ", 1)[1] if editable else line
-        setup_path = None
-        name = None
-        req = None
-        if not extras:
-            extras = ()
-        else:
-            extras = tuple(extras)
-        if not any([is_installable_file(line), is_valid_url(line), is_file_url(line)]):
-            try:
-                req = init_requirement(line)
-            except Exception:
-                raise RequirementError(
-                    "Supplied requirement is not installable: {0!r}".format(line)
-                )
-            else:
-                name = getattr(req, "name", None)
-                line = getattr(req, "url", None)
-        vcs_type, prefer, relpath, path, uri, link = cls.get_link_from_line(line)
-        arg_dict = {
-            "path": relpath if relpath else path,
-            "uri": unquote(link.url_without_fragment),
-            "link": link,
-            "editable": editable,
-            "setup_path": setup_path,
-            "uri_scheme": prefer,
-            "line": line,
-            "extras": extras,
-            # "name": name,
-        }
-        if req is not None:
-            arg_dict["req"] = req
-        if parsed_line is not None:
-            arg_dict["parsed_line"] = parsed_line
-        if link and link.is_wheel:
-            from pip_shims import Wheel
-
-            arg_dict["name"] = Wheel(link.filename).name
-        elif name:
-            arg_dict["name"] = name
-        elif link.egg_fragment:
-            arg_dict["name"] = link.egg_fragment
-        return cls.create(**arg_dict)
+        parsed_line = Line(line)
+        return file_req_from_parsed_line(parsed_line)

     @classmethod
     def from_pipfile(cls, name, pipfile):
@@ -1964,8 +1896,9 @@ def from_pipfile(cls, name, pipfile):
             line = "{0}&subdirectory={1}".format(line, pipfile["subdirectory"])
         if editable:
             line = "-e {0}".format(line)
-        arg_dict["line"] = line
-        return cls.create(**arg_dict)  # type: ignore
+        arg_dict["parsed_line"] = Line(line)
+        arg_dict["setup_info"] = arg_dict["parsed_line"].setup_info
+        return cls(**arg_dict)  # type: ignore

     @property
     def line_part(self):
@@ -2344,7 +2277,7 @@ def locked_vcs_repo(self, src_dir=None):
         )
         if self.parsed_line and self._parsed_line:
             self._parsed_line.vcsrepo = vcsrepo
-        if self.req:
+        if self.req and not self.editable:
             self.req.specifier = SpecifierSet("=={0}".format(self.setup_info.version))
         try:
             yield self._repo
@@ -2407,83 +2340,8 @@ def from_pipfile(cls, name, pipfile):

     @classmethod
     def 
from_line(cls, line, editable=None, extras=None, parsed_line=None): # type: (AnyStr, Optional[bool], Optional[Tuple[AnyStr, ...]], Optional[Line]) -> F - relpath = None - if parsed_line is None: - parsed_line = Line(line) - if editable: - parsed_line.editable = editable - if extras: - parsed_line.extras = extras - if line.startswith("-e "): - editable = True - line = line.split(" ", 1)[1] - if "@" in line: - parsed = urllib_parse.urlparse(add_ssh_scheme_to_git_uri(line)) - if not parsed.scheme: - possible_name, _, line = line.partition("@") - possible_name = possible_name.strip() - line = line.strip() - possible_name, extras = pip_shims.shims._strip_extras(possible_name) - name = possible_name - line = "{0}#egg={1}".format(line, name) - vcs_type, prefer, relpath, path, uri, link = cls.get_link_from_line(line) - if not extras and link.egg_fragment: - name, extras = pip_shims.shims._strip_extras(link.egg_fragment) - else: - name, _ = pip_shims.shims._strip_extras(link.egg_fragment) - parsed_extras = None # type: Optional[List[STRING_TYPE]] - extras_tuple = None # type: Optional[Tuple[STRING_TYPE, ...]] - if not extras: - line, extras = pip_shims.shims._strip_extras(line) - if extras: - if isinstance(extras, six.string_types): - parsed_extras = parse_extras(extras) - if parsed_extras: - extras_tuple = tuple(parsed_extras) - subdirectory = link.subdirectory_fragment - ref = None - if uri: - uri, ref = split_ref_from_uri(uri) - if path is not None and "@" in path: - path, _ref = split_ref_from_uri(path) - if ref is None: - ref = _ref - if relpath and "@" in relpath: - relpath, ref = split_ref_from_uri(relpath) - - creation_args = { - "name": name if name else parsed_line.name, - "path": relpath or path, - "editable": editable, - "extras": extras_tuple, - "link": link, - "vcs_type": vcs_type, - "line": line, - "uri": uri, - "uri_scheme": prefer, - "parsed_line": parsed_line, - } - if relpath: - creation_args["relpath"] = relpath - # return cls.create(**creation_args) - cls_inst = cls( - name=name, - ref=ref, - vcs=vcs_type, - subdirectory=subdirectory, - link=link, - path=relpath or path, - editable=editable, - uri=uri, - extras=extras_tuple if extras_tuple else tuple(), - base_line=line, - parsed_line=parsed_line, - ) - if cls_inst.req and ( - cls_inst._parsed_line.ireq and not cls_inst.parsed_line.ireq.req - ): - cls_inst._parsed_line._ireq.req = cls_inst.req - return cls_inst + parsed_line = Line(line) + return vcs_req_from_parsed_line(parsed_line) @property def line_part(self): @@ -3238,15 +3096,32 @@ def merge_markers(self, markers): # type: (Union[AnyStr, Marker]) -> None if not isinstance(markers, Marker): markers = Marker(markers) - _markers = set() # type: Set[Marker] - if self.ireq and self.ireq.markers: - _markers.add(Marker(self.ireq.markers)) - _markers.add(markers) - new_markers = Marker(" or ".join([str(m) for m in sorted(_markers)])) - self.markers = str(new_markers) - if self.req and self.req.req: - self.req.req.marker = new_markers - return + _markers = [] # type: List[Marker] + ireq = self.as_ireq() + if ireq and ireq.markers: + ireq_marker = ireq.markers + _markers.append(str(ireq_marker)) + _markers.append(str(markers)) + marker_str = " and ".join([normalize_marker_str(m) for m in _markers if m]) + new_marker = Marker(marker_str) + line = copy.deepcopy(self._line_instance) + line.markers = marker_str + line.parsed_marker = new_marker + if getattr(line, "_requirement", None) is not None: + line._requirement.marker = new_marker + if getattr(line, "_ireq", None) is not None and 
line._ireq.req: + line._ireq.req.marker = new_marker + new_ireq = getattr(self, "ireq", None) + if new_ireq and new_ireq.req: + new_ireq.req.marker = new_marker + req = self.req + if req.req: + req_requirement = req.req + req_requirement.marker = new_marker + req = attr.evolve(req, req=req_requirement, parsed_line=line) + return attr.evolve( + self, markers=str(new_marker), ireq=new_ireq, req=req, line_instance=line + ) def file_req_from_parsed_line(parsed_line): diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 0f7bc177e8..8fe6506880 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -1,11 +1,14 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function +import ast import atexit import contextlib +import importlib import os import shutil import sys +from functools import partial import attr import packaging.specifiers @@ -13,13 +16,22 @@ import packaging.version import pep517.envbuild import pep517.wrappers +import pkg_resources.extern.packaging.requirements as pkg_resources_requirements import six from appdirs import user_cache_dir from distlib.wheel import Wheel from packaging.markers import Marker from six.moves import configparser from six.moves.urllib.parse import unquote, urlparse, urlunparse -from vistir.compat import Iterable, Path, lru_cache +from vistir.compat import ( + FileNotFoundError, + Iterable, + Mapping, + Path, + fs_decode, + fs_encode, + lru_cache, +) from vistir.contextmanagers import cd, temp_path from vistir.misc import run from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p, rmtree @@ -36,9 +48,10 @@ from ..exceptions import RequirementError try: - from setuptools.dist import distutils + from setuptools.dist import distutils, Distribution except ImportError: import distutils + from distutils.core import Distribution try: @@ -50,6 +63,7 @@ if MYPY_RUNNING: from typing import ( Any, + Callable, Dict, List, Generator, @@ -60,11 +74,13 @@ Text, Set, AnyStr, + Sequence, ) from pip_shims.shims import InstallRequirement, PackageFinder from pkg_resources import ( PathMetadata, DistInfoDistribution, + EggInfoDistribution, Requirement as PkgResourcesRequirement, ) from packaging.requirements import Requirement as PackagingRequirement @@ -76,6 +92,7 @@ MarkerType = TypeVar("MarkerType", covariant=True, bound=Marker) STRING_TYPE = Union[str, bytes, Text] S = TypeVar("S", bytes, str, Text) + AST_SEQ = TypeVar("AST_SEQ", ast.Tuple, ast.List) CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv")) @@ -87,7 +104,7 @@ def pep517_subprocess_runner(cmd, cwd=None, extra_environ=None): - # type: (List[AnyStr], Optional[AnyStr], Optional[Dict[AnyStr, AnyStr]]) -> None + # type: (List[AnyStr], Optional[AnyStr], Optional[Mapping[S, S]]) -> None """The default method of calling the wrapper subprocess.""" env = os.environ.copy() if extra_environ: @@ -133,6 +150,132 @@ def __init__(self, source_dir, build_backend): self._subprocess_runner = pep517_subprocess_runner +def parse_special_directives(setup_entry, package_dir=None): + # type: (S, Optional[S]) -> S + rv = setup_entry + if not package_dir: + package_dir = os.getcwd() + if setup_entry.startswith("file:"): + _, path = setup_entry.split("file:") + path = path.strip() + if os.path.exists(path): + with open(path, "r") as fh: + rv = fh.read() + elif setup_entry.startswith("attr:"): + _, resource = setup_entry.split("attr:") + resource = 
resource.strip()
+        with temp_path():
+            sys.path.insert(0, package_dir)
+            # Only dotted paths ("module.attr") name an importable attribute.
+            if "." in resource:
+                resource, _, attribute = resource.rpartition(".")
+                module = importlib.import_module(resource)
+                rv = getattr(module, attribute)
+            if not isinstance(rv, six.string_types):
+                rv = str(rv)
+    return rv
+
+
+def make_base_requirements(reqs):
+    # type: (Sequence[STRING_TYPE]) -> Set[BaseRequirement]
+    requirements = set()
+    if not isinstance(reqs, (list, tuple, set)):
+        reqs = [reqs]
+    for req in reqs:
+        if isinstance(req, BaseRequirement):
+            requirements.add(req)
+        elif isinstance(req, pkg_resources_requirements.Requirement):
+            requirements.add(BaseRequirement.from_req(req))
+        elif req and not req.startswith("#"):
+            requirements.add(BaseRequirement.from_string(req))
+    return requirements
+
+
+def setuptools_parse_setup_cfg(path):
+    from setuptools.config import read_configuration
+
+    parsed = read_configuration(path)
+    results = parsed.get("metadata", {})
+    results.update(parsed.get("options", {}))
+    results["install_requires"] = make_base_requirements(
+        results.get("install_requires", [])
+    )
+    extras = {}
+    for extras_section, extras_reqs in results.get("extras_require", {}).items():
+        new_reqs = tuple(make_base_requirements(extras_reqs))
+        if new_reqs:
+            extras[extras_section] = new_reqs
+    results["extras_require"] = extras
+    results["setup_requires"] = make_base_requirements(results.get("setup_requires", []))
+    return results
+
+
+def parse_setup_cfg(setup_cfg_path):
+    # type: (S) -> Dict[S, Union[S, None, Set[BaseRequirement], List[S], Tuple[S, Tuple[BaseRequirement]]]]
+    if os.path.exists(setup_cfg_path):
+        try:
+            return setuptools_parse_setup_cfg(setup_cfg_path)
+        except Exception:
+            pass
+    default_opts = {
+        "metadata": {"name": "", "version": ""},
+        "options": {
+            "install_requires": "",
+            "python_requires": "",
+            "build_requires": "",
+            "setup_requires": "",
+            "extras": "",
+            "packages.find": {"where": "."},
+        },
+    }
+    parser = configparser.ConfigParser(default_opts)
+    parser.read(setup_cfg_path)
+    results = {}
+    package_dir = os.getcwd()
+    if parser.has_option("options", "packages.find"):
+        pkg_dir = parser.get("options", "packages.find")
+        if isinstance(pkg_dir, Mapping):
+            package_dir = os.path.join(package_dir, pkg_dir.get("where"))
+    elif parser.has_option("options", "packages"):
+        pkg_dir = parser.get("options", "packages")
+        if "find:" in pkg_dir:
+            _, pkg_dir = pkg_dir.split("find:")
+            pkg_dir = pkg_dir.strip()
+            package_dir = os.path.join(package_dir, pkg_dir)
+    if parser.has_option("metadata", "name"):
+        results["name"] = parse_special_directives(
+            parser.get("metadata", "name"), package_dir
+        )
+    if parser.has_option("metadata", "version"):
+        results["version"] = parse_special_directives(
+            parser.get("metadata", "version"), package_dir
+        )
+    install_requires = set()  # type: Set[BaseRequirement]
+    if parser.has_option("options", "install_requires"):
+        install_requires = make_base_requirements(
+            parser.get("options", "install_requires").split("\n")
+        )
+    results["install_requires"] = install_requires
+    if parser.has_option("options", "python_requires"):
+        results["python_requires"] = parse_special_directives(
+            parser.get("options", "python_requires"), package_dir
+        )
+    if parser.has_option("options", "build_requires"):
+        results["build_requires"] = parser.get("options", "build_requires")
+    extras = {}
+    if "options.extras_require" in parser.sections():
+        extras_require_section = parser.options("options.extras_require")
+        for section in extras_require_section:
+            if section in
["options", "metadata"]: + continue + section_contents = parser.get("options.extras_require", section) + section_list = section_contents.split("\n") + section_extras = tuple(make_base_requirements(section_list)) + if section_extras: + extras[section] = section_extras + results["extras_require"] = extras + return results + + @contextlib.contextmanager def _suppress_distutils_logs(): # type: () -> Generator[None, None, None] @@ -208,9 +351,12 @@ def ensure_reqs(reqs): def _prepare_wheel_building_kwargs( - ireq=None, src_root=None, src_dir=None, editable=False + ireq=None, # type: Optional[InstallRequirement] + src_root=None, # type: Optional[STRING_TYPE] + src_dir=None, # type: Optional[STRING_TYPE] + editable=False, # type: bool ): - # type: (Optional[InstallRequirement], Optional[AnyStr], Optional[AnyStr], bool) -> Dict[AnyStr, AnyStr] + # type: (...) -> Dict[STRING_TYPE, STRING_TYPE] download_dir = os.path.join(CACHE_DIR, "pkgs") # type: STRING_TYPE mkdir_p(download_dir) @@ -220,7 +366,7 @@ def _prepare_wheel_building_kwargs( if src_dir is None: if editable and src_root is not None: src_dir = src_root - elif ireq is None and src_root is not None: + elif ireq is None and src_root is not None and not editable: src_dir = _get_src_dir(root=src_root) # type: STRING_TYPE elif ireq is not None and ireq.editable and src_root is not None: src_dir = _get_src_dir(root=src_root) @@ -240,24 +386,45 @@ def _prepare_wheel_building_kwargs( } +class ScandirCloser(object): + def __init__(self, path): + self.iterator = scandir(path) + + def __next__(self): + return next(iter(self.iterator)) + + def __iter__(self): + return self + + def next(self): + return self.__next__() + + def close(self): + if getattr(self.iterator, "close", None): + self.iterator.close() + else: + pass + + def iter_metadata(path, pkg_name=None, metadata_type="egg-info"): # type: (AnyStr, Optional[AnyStr], AnyStr) -> Generator if pkg_name is not None: pkg_variants = get_name_variants(pkg_name) non_matching_dirs = [] - for entry in scandir(path): - if entry.is_dir(): - entry_name, ext = os.path.splitext(entry.name) - if ext.endswith(metadata_type): - if pkg_name is None or entry_name.lower() in pkg_variants: - yield entry - elif not entry.name.endswith(metadata_type): - non_matching_dirs.append(entry) - for entry in non_matching_dirs: - for dir_entry in iter_metadata( - entry.path, pkg_name=pkg_name, metadata_type=metadata_type - ): - yield dir_entry + with contextlib.closing(ScandirCloser(path)) as path_iterator: + for entry in path_iterator: + if entry.is_dir(): + entry_name, ext = os.path.splitext(entry.name) + if ext.endswith(metadata_type): + if pkg_name is None or entry_name.lower() in pkg_variants: + yield entry + elif not entry.name.endswith(metadata_type): + non_matching_dirs.append(entry) + for entry in non_matching_dirs: + for dir_entry in iter_metadata( + entry.path, pkg_name=pkg_name, metadata_type=metadata_type + ): + yield dir_entry def find_egginfo(target, pkg_name=None): @@ -290,39 +457,47 @@ def find_distinfo(target, pkg_name=None): yield dist_dir +def get_distinfo_dist(path, pkg_name=None): + # type: (S, Optional[S]) -> Optional[DistInfoDistribution] + import pkg_resources + + dist_dir = next(iter(find_distinfo(path, pkg_name=pkg_name)), None) + if dist_dir is not None: + metadata_dir = dist_dir.path + base_dir = os.path.dirname(metadata_dir) + dist = next(iter(pkg_resources.find_distributions(base_dir)), None) + if dist is not None: + return dist + return None + + +def get_egginfo_dist(path, pkg_name=None): + # type: 
(S, Optional[S]) -> Optional[EggInfoDistribution] + import pkg_resources + + egg_dir = next(iter(find_egginfo(path, pkg_name=pkg_name)), None) + if egg_dir is not None: + metadata_dir = egg_dir.path + base_dir = os.path.dirname(metadata_dir) + path_metadata = pkg_resources.PathMetadata(base_dir, metadata_dir) + dist_iter = pkg_resources.distributions_from_metadata(path_metadata.egg_info) + dist = next(iter(dist_iter), None) + if dist is not None: + return dist + return None + + def get_metadata(path, pkg_name=None, metadata_type=None): # type: (S, Optional[S], Optional[S]) -> Dict[S, Union[S, List[RequirementType], Dict[S, RequirementType]]] - metadata_dirs = [] wheel_allowed = metadata_type == "wheel" or metadata_type is None egg_allowed = metadata_type == "egg" or metadata_type is None - egg_dir = next(iter(find_egginfo(path, pkg_name=pkg_name)), None) - dist_dir = next(iter(find_distinfo(path, pkg_name=pkg_name)), None) - if dist_dir and wheel_allowed: - metadata_dirs.append(dist_dir) - if egg_dir and egg_allowed: - metadata_dirs.append(egg_dir) - matched_dir = next(iter(d for d in metadata_dirs if d is not None), None) - metadata_dir = None - base_dir = None - if matched_dir is not None: - import pkg_resources - - metadata_dir = os.path.abspath(matched_dir.path) - base_dir = os.path.dirname(metadata_dir) - dist = None - distinfo_dist = None - egg_dist = None - if wheel_allowed and dist_dir is not None: - distinfo_dist = next(iter(pkg_resources.find_distributions(base_dir)), None) - if egg_allowed and egg_dir is not None: - path_metadata = pkg_resources.PathMetadata(base_dir, metadata_dir) - egg_dist = next( - iter(pkg_resources.distributions_from_metadata(path_metadata.egg_info)), - None, - ) - dist = next(iter(d for d in (distinfo_dist, egg_dist) if d is not None), None) - if dist is not None: - return get_metadata_from_dist(dist) + dist = None # type: Optional[Union[DistInfoDistribution, EggInfoDistribution]] + if wheel_allowed: + dist = get_distinfo_dist(path, pkg_name=pkg_name) + if egg_allowed and dist is None: + dist = get_egginfo_dist(path, pkg_name=pkg_name) + if dist is not None: + return get_metadata_from_dist(dist) return {} @@ -371,7 +546,7 @@ def get_metadata_from_wheel(wheel_path): def get_metadata_from_dist(dist): - # type: (Union[PathMetadata, DistInfoDistribution]) -> Dict[S, Union[S, List[RequirementType], Dict[S, RequirementType]]] + # type: (Union[PathMetadata, EggInfoDistribution, DistInfoDistribution]) -> Dict[S, Union[S, List[RequirementType], Dict[S, RequirementType]]] try: requires = dist.requires() except Exception: @@ -392,8 +567,12 @@ def get_metadata_from_dist(dist): if k.startswith(":python_version"): marker = k.replace(":", "; ") else: - marker = "" - extra = "{0}".format(k) + if ":python_version" in k: + extra, _, marker = k.partition(":") + marker = "; {0}".format(marker) + else: + marker = "" + extra = "{0}".format(k) _deps = ["{0}{1}".format(str(req), marker) for req in _deps] _deps = ensure_reqs(tuple(_deps)) if extra: @@ -408,6 +587,187 @@ def get_metadata_from_dist(dist): } +class Analyzer(ast.NodeVisitor): + def __init__(self): + self.name_types = [] + self.function_map = {} # type: Dict[Any, Any] + self.functions = [] + self.strings = [] + self.assignments = {} + super(Analyzer, self).__init__() + + def generic_visit(self, node): + if isinstance(node, ast.Call): + self.functions.append(node) + self.function_map.update(ast_unparse(node, initial_mapping=True)) + if isinstance(node, ast.Name): + self.name_types.append(node) + if isinstance(node, 
ast.Str): + self.strings.append(node) + if isinstance(node, ast.Assign): + self.assignments.update(ast_unparse(node, initial_mapping=True)) + super(Analyzer, self).generic_visit(node) + + def match_assignment_str(self, match): + return next( + iter(k for k in self.assignments if getattr(k, "id", "") == match), None + ) + + def match_assignment_name(self, match): + return next( + iter(k for k in self.assignments if getattr(k, "id", "") == match.id), None + ) + + +def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # noqa:C901 + # type: (Any, bool, Optional[Analyzer], bool) -> Union[List[Any], Dict[Any, Any], Tuple[Any, ...], STRING_TYPE] + unparse = partial(ast_unparse, initial_mapping=initial_mapping, analyzer=analyzer) + if isinstance(item, ast.Dict): + unparsed = dict(zip(unparse(item.keys), unparse(item.values))) + elif isinstance(item, ast.List): + unparsed = [unparse(el) for el in item.elts] + elif isinstance(item, ast.Tuple): + unparsed = tuple([unparse(el) for el in item.elts]) + elif isinstance(item, ast.Str): + unparsed = item.s + elif isinstance(item, ast.Subscript): + unparsed = unparse(item.value) + elif isinstance(item, ast.Name): + if not initial_mapping: + if analyzer and recurse: + if item in analyzer.assignments: + items = unparse(analyzer.assignments[item]) + unparsed = items.get(item.id, item.id) + else: + assignment = analyzer.match_assignment_name(item) + if assignment is not None: + items = unparse(analyzer.assignments[assignment]) + unparsed = items.get(item.id, item.id) + else: + unparsed = item.id + else: + unparsed = item.id + else: + unparsed = item + elif six.PY3 and isinstance(item, ast.NameConstant): + unparsed = item.value + elif isinstance(item, ast.Call): + unparsed = {} + if isinstance(item.func, ast.Name): + name = unparse(item.func) + unparsed[name] = {} + for keyword in item.keywords: + unparsed[name].update(unparse(keyword)) + elif isinstance(item, ast.keyword): + unparsed = {unparse(item.arg): unparse(item.value)} + elif isinstance(item, ast.Assign): + # XXX: DO NOT UNPARSE THIS + # XXX: If we unparse this it becomes impossible to map it back + # XXX: To the original node in the AST so we can find the + # XXX: Original reference + if not initial_mapping: + target = unparse(next(iter(item.targets)), recurse=False) + val = unparse(item.value) + if isinstance(target, (tuple, set, list)): + unparsed = dict(zip(target, val)) + else: + unparsed = {target: val} + else: + unparsed = {next(iter(item.targets)): item} + elif isinstance(item, Mapping): + unparsed = {} + for k, v in item.items(): + try: + unparsed[unparse(k)] = unparse(v) + except TypeError: + unparsed[k] = unparse(v) + elif isinstance(item, (list, tuple)): + unparsed = type(item)([unparse(el) for el in item]) + elif isinstance(item, six.string_types): + unparsed = item + else: + return item + return unparsed + + +def ast_parse_setup_py(path): + # type: (S) -> Dict[Any, Any] + with open(path, "r") as fh: + tree = ast.parse(fh.read()) + ast_analyzer = Analyzer() + ast_analyzer.visit(tree) + setup = {} # type: Dict[Any, Any] + for k, v in ast_analyzer.function_map.items(): + if isinstance(k, ast.Name) and k.id == "setup": + setup = v + cleaned_setup = ast_unparse(setup, analyzer=ast_analyzer) + return cleaned_setup + + +def run_setup(script_path, egg_base=None): + # type: (str, Optional[str]) -> Distribution + """Run a `setup.py` script with a target **egg_base** if provided. 
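The `Analyzer`/`ast_unparse` machinery added above lets requirementslib read `setup(...)` keywords statically instead of executing `setup.py`. A much-reduced sketch of that idea, for illustration only and assuming purely literal keyword values (the vendored version additionally resolves names through earlier assignments):

    import ast

    SETUP_PY = 'from setuptools import setup\nsetup(name="demo", version="1.0", install_requires=["six"])'

    def literal_setup_kwargs(source):
        # Walk the module AST and return the literal keyword arguments
        # of the first setup(...) call found.
        for node in ast.walk(ast.parse(source)):
            if isinstance(node, ast.Call) and getattr(node.func, "id", "") == "setup":
                return {
                    kw.arg: ast.literal_eval(kw.value)
                    for kw in node.keywords
                    if kw.arg is not None
                }
        return {}

    print(literal_setup_kwargs(SETUP_PY))
    # {'name': 'demo', 'version': '1.0', 'install_requires': ['six']}

The static path can never be complete (arbitrary code may compute the arguments), which is why the patch keeps `run_setup` as a fallback.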
+ + :param S script_path: The path to the `setup.py` script to run + :param Optional[S] egg_base: The metadata directory to build in + :raises FileNotFoundError: If the provided `script_path` does not exist + :return: The metadata dictionary + :rtype: Dict[Any, Any] + """ + + if not os.path.exists(script_path): + raise FileNotFoundError(script_path) + target_cwd = os.path.dirname(os.path.abspath(script_path)) + if egg_base is None: + egg_base = os.path.join(target_cwd, "reqlib-metadata") + with temp_path(), cd(target_cwd), _suppress_distutils_logs(): + # This is for you, Hynek + # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py + args = ["egg_info"] + if egg_base: + args += ["--egg-base", egg_base] + script_name = os.path.basename(script_path) + g = {"__file__": script_name, "__name__": "__main__"} + sys.path.insert(0, target_cwd) + local_dict = {} + if sys.version_info < (3, 5): + save_argv = sys.argv + else: + save_argv = sys.argv.copy() + try: + global _setup_distribution, _setup_stop_after + _setup_stop_after = "run" + sys.argv[0] = script_name + sys.argv[1:] = args + with open(script_name, "rb") as f: + contents = f.read() + if six.PY3: + contents.replace(br"\r\n", br"\n") + else: + contents.replace(r"\r\n", r"\n") + if sys.version_info < (3, 5): + exec(contents, g, local_dict) + else: + exec(contents, g) + # We couldn't import everything needed to run setup + except Exception: + python = os.environ.get("PIP_PYTHON_PATH", sys.executable) + out, _ = run( + [python, "setup.py"] + args, + cwd=target_cwd, + block=True, + combine_stderr=False, + return_object=False, + nospin=True, + ) + finally: + _setup_stop_after = None + sys.argv = save_argv + _setup_distribution = get_metadata(egg_base, metadata_type="egg") + dist = _setup_distribution + return dist + + @attr.s(slots=True, frozen=True) class BaseRequirement(object): name = attr.ib(default="", cmp=True) # type: STRING_TYPE @@ -420,11 +780,11 @@ def __str__(self): return "{0}".format(str(self.requirement)) def as_dict(self): - # type: () -> Dict[S, Optional[PkgResourcesRequirement]] + # type: () -> Dict[STRING_TYPE, Optional[PkgResourcesRequirement]] return {self.name: self.requirement} def as_tuple(self): - # type: () -> Tuple[S, Optional[PkgResourcesRequirement]] + # type: () -> Tuple[STRING_TYPE, Optional[PkgResourcesRequirement]] return (self.name, self.requirement) @classmethod @@ -458,19 +818,19 @@ class Extra(object): def __str__(self): # type: () -> S return "{0}: {{{1}}}".format( - self.section, ", ".join([r.name for r in self.requirements]) + self.name, ", ".join([r.name for r in self.requirements]) ) def add(self, req): - # type: (BaseRequirement) -> None + # type: (BaseRequirement) -> "Extra" if req not in self.requirements: - return attr.evolve( - self, requirements=frozenset(set(self.requirements).add(req)) - ) + current_set = set(self.requirements) + current_set.add(req) + return attr.evolve(self, requirements=frozenset(current_set)) return self def as_dict(self): - # type: () -> Dict[S, Tuple[RequirementType, ...]] + # type: () -> Dict[STRING_TYPE, Tuple[RequirementType, ...]] return {self.name: tuple([r.requirement for r in self.requirements])} @@ -478,15 +838,17 @@ def as_dict(self): class SetupInfo(object): name = attr.ib(default=None, cmp=True) # type: STRING_TYPE base_dir = attr.ib(default=None, cmp=True, hash=False) # type: STRING_TYPE - version = attr.ib(default=None, cmp=True) # type: STRING_TYPE - _requirements = attr.ib(type=frozenset, factory=frozenset, cmp=True, hash=True) - build_requires = 
attr.ib(type=tuple, default=attr.Factory(tuple), cmp=True) + _version = attr.ib(default=None, cmp=True) # type: STRING_TYPE + _requirements = attr.ib( + type=frozenset, factory=frozenset, cmp=True, hash=True + ) # type: Optional[frozenset] + build_requires = attr.ib(default=None, cmp=True) # type: Optional[Tuple] build_backend = attr.ib(cmp=True) # type: STRING_TYPE - setup_requires = attr.ib(type=tuple, default=attr.Factory(tuple), cmp=True) + setup_requires = attr.ib(default=None, cmp=True) # type: Optional[Tuple] python_requires = attr.ib( - type=packaging.specifiers.SpecifierSet, default=None, cmp=True - ) - _extras_requirements = attr.ib(type=tuple, default=attr.Factory(tuple), cmp=True) + default=None, cmp=True + ) # type: Optional[packaging.specifiers.SpecifierSet] + _extras_requirements = attr.ib(default=None, cmp=True) # type: Optional[Tuple] setup_cfg = attr.ib(type=Path, default=None, cmp=True, hash=False) setup_py = attr.ib(type=Path, default=None, cmp=True, hash=False) pyproject = attr.ib(type=Path, default=None, cmp=True, hash=False) @@ -498,17 +860,23 @@ class SetupInfo(object): @build_backend.default def get_build_backend(self): - # type: () -> S + # type: () -> STRING_TYPE return get_default_pyproject_backend() @property def requires(self): # type: () -> Dict[S, RequirementType] + if self._requirements is None: + self._requirements = frozenset() + self.get_info() return {req.name: req.requirement for req in self._requirements} @property def extras(self): # type: () -> Dict[S, Optional[Any]] + if self._extras_requirements is None: + self._extras_requirements = () + self.get_info() extras_dict = {} extras = set(self._extras_requirements) for section, deps in extras: @@ -518,58 +886,18 @@ def extras(self): extras_dict[section] = [d.requirement for d in deps] return extras_dict + @property + def version(self): + # type: () -> Optional[str] + if not self._version: + info = self.get_info() + self._version = info.get("version", None) + return self._version + @classmethod def get_setup_cfg(cls, setup_cfg_path): # type: (S) -> Dict[S, Union[S, None, Set[BaseRequirement], List[S], Tuple[S, Tuple[BaseRequirement]]]] - if os.path.exists(setup_cfg_path): - default_opts = { - "metadata": {"name": "", "version": ""}, - "options": { - "install_requires": "", - "python_requires": "", - "build_requires": "", - "setup_requires": "", - "extras": "", - }, - } - parser = configparser.ConfigParser(default_opts) - parser.read(setup_cfg_path) - results = {} - if parser.has_option("metadata", "name"): - results["name"] = parser.get("metadata", "name") - if parser.has_option("metadata", "version"): - results["version"] = parser.get("metadata", "version") - install_requires = set() # type: Set[BaseRequirement] - if parser.has_option("options", "install_requires"): - install_requires = set( - [ - BaseRequirement.from_string(dep) - for dep in parser.get("options", "install_requires").split("\n") - if dep - ] - ) - results["install_requires"] = install_requires - if parser.has_option("options", "python_requires"): - results["python_requires"] = parser.get("options", "python_requires") - if parser.has_option("options", "build_requires"): - results["build_requires"] = parser.get("options", "build_requires") - extras = [] - if "options.extras_require" in parser.sections(): - extras_require_section = parser.options("options.extras_require") - for section in extras_require_section: - if section in ["options", "metadata"]: - continue - section_contents = parser.get("options.extras_require", section) - 
section_list = section_contents.split("\n") - section_extras = [] - for extra_name in section_list: - if not extra_name or extra_name.startswith("#"): - continue - section_extras.append(BaseRequirement.from_string(extra_name)) - if section_extras: - extras.append(tuple([section, tuple(section_extras)])) - results["extras_require"] = tuple(extras) - return results + return parse_setup_cfg(setup_cfg_path) @property def egg_base(self): @@ -587,123 +915,125 @@ def egg_base(self): base = Path(self.extra_kwargs["src_dir"]) egg_base = base.joinpath("reqlib-metadata") if not egg_base.exists(): - atexit.register(rmtree, egg_base.as_posix()) + atexit.register(rmtree, fs_encode(egg_base.as_posix())) egg_base.mkdir(parents=True, exist_ok=True) return egg_base.as_posix() - def parse_setup_cfg(self): + def update_from_dict(self, metadata): + name = metadata.get("name", self.name) + if isinstance(name, six.string_types): + self.name = self.name if self.name else name + version = metadata.get("version", None) + if version: + try: + packaging.version.parse(version) + except TypeError: + version = self.version if self.version else None + else: + version = version + if version: + self._version = version + build_requires = metadata.get("build_requires", []) + if self.build_requires is None: + self.build_requires = () + self.build_requires = tuple(set(self.build_requires) | set(build_requires)) + self._requirements = ( + frozenset() if self._requirements is None else self._requirements + ) + requirements = set(self._requirements) + install_requires = make_base_requirements(metadata.get("install_requires", [])) + requirements |= install_requires + setup_requires = make_base_requirements(metadata.get("setup_requires", [])) + if self.setup_requires is None: + self.setup_requires = () + self.setup_requires = tuple(set(self.setup_requires) | setup_requires) + if self.ireq.editable: + requirements |= setup_requires + # TODO: Should this be a specifierset? 
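The reworked `SetupInfo` fields above (`_version`, `_requirements`, `_extras_requirements`) now start out unset, and the public properties populate them on first access by calling `get_info()`. A minimal sketch of that lazy-population pattern, with hypothetical names:

    class LazyMetadata(object):
        def __init__(self):
            self._version = None  # nothing computed yet

        def get_info(self):
            # Stand-in for the expensive build/egg-info step.
            self._version = "1.0"
            return {"version": self._version}

        @property
        def version(self):
            # Trigger the expensive step only while the field is unset.
            if self._version is None:
                self.get_info()
            return self._version

    meta = LazyMetadata()
    print(meta.version)  # runs get_info() once, then answers from the cache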
+ self.python_requires = metadata.get("python_requires", self.python_requires) + extras_require = metadata.get("extras_require", {}) + extras_tuples = [] + for section in set(list(extras_require.keys())) - set(list(self.extras.keys())): + extras = extras_require[section] + extras_set = make_base_requirements(extras) + if self.ireq and self.ireq.extras and section in self.ireq.extras: + requirements |= extras_set + extras_tuples.append((section, tuple(extras_set))) + if self._extras_requirements is None: + self._extras_requirements = () + self._extras_requirements += tuple(extras_tuples) + build_backend = metadata.get("build_backend", "setuptools.build_meta:__legacy__") + if not self.build_backend: + self.build_backend = build_backend + self._requirements = frozenset(requirements) + + def get_extras_from_ireq(self): # type: () -> None + if self.ireq and self.ireq.extras: + for extra in self.ireq.extras: + if extra in self.extras: + extras = make_base_requirements(self.extras[extra]) + self._requirements = frozenset(set(self._requirements) | extras) + else: + extras = tuple(make_base_requirements(extra)) + self._extras_requirements += (extra, extras) + + def parse_setup_cfg(self): + # type: () -> Dict[STRING_TYPE, Any] if self.setup_cfg is not None and self.setup_cfg.exists(): parsed = self.get_setup_cfg(self.setup_cfg.as_posix()) - if self.name is None: - self.name = parsed.get("name") - if self.version is None: - self.version = parsed.get("version") - build_requires = parsed.get("build_requires", []) - if self.build_requires: - self.build_requires = tuple( - set(self.build_requires) | set(build_requires) - ) - self._requirements = frozenset( - set(self._requirements) | set(parsed["install_requires"]) - ) - if self.python_requires is None: - self.python_requires = parsed.get("python_requires") - if not self._extras_requirements: - self._extras_requirements = parsed["extras_require"] - else: - self._extras_requirements = ( - self._extras_requirements + parsed["extras_require"] - ) - if self.ireq is not None and self.ireq.extras: - for extra in self.ireq.extras: - if extra in self.extras: - extras_tuple = tuple( - [BaseRequirement.from_req(req) for req in self.extras[extra]] - ) - self._extras_requirements += ((extra, extras_tuple),) - self._requirements = frozenset( - set(self._requirements) | set(list(extras_tuple)) - ) + if not parsed: + return {} + return parsed + return {} + + def parse_setup_py(self): + # type: () -> Dict[STRING_TYPE, Any] + if self.setup_py is not None and self.setup_py.exists(): + parsed = ast_parse_setup_py(self.setup_py.as_posix()) + if not parsed: + return {} + return parsed + return {} def run_setup(self): - # type: () -> None + # type: () -> "SetupInfo" if self.setup_py is not None and self.setup_py.exists(): + dist = run_setup(self.setup_py.as_posix(), egg_base=self.egg_base) target_cwd = self.setup_py.parent.as_posix() - with temp_path(), cd(target_cwd), _suppress_distutils_logs(): - # This is for you, Hynek - # see https://github.com/hynek/environ_config/blob/69b1c8a/setup.py - script_name = self.setup_py.as_posix() - args = ["egg_info", "--egg-base", self.egg_base] - g = {"__file__": script_name, "__name__": "__main__"} - sys.path.insert(0, os.path.dirname(os.path.abspath(script_name))) - local_dict = {} - if sys.version_info < (3, 5): - save_argv = sys.argv - else: - save_argv = sys.argv.copy() - try: - global _setup_distribution, _setup_stop_after - _setup_stop_after = "run" - sys.argv[0] = script_name - sys.argv[1:] = args - with open(script_name, "rb") as 
f: - if sys.version_info < (3, 5): - exec(f.read(), g, local_dict) - else: - exec(f.read(), g) - # We couldn't import everything needed to run setup - except NameError: - python = os.environ.get("PIP_PYTHON_PATH", sys.executable) - out, _ = run( - [python, "setup.py"] + args, - cwd=target_cwd, - block=True, - combine_stderr=False, - return_object=False, - nospin=True, - ) - finally: - _setup_stop_after = None - sys.argv = save_argv - dist = _setup_distribution + with temp_path(), cd(target_cwd): if not dist: - self.get_egg_metadata() - return + metadata = self.get_egg_metadata() + if metadata: + return self.populate_metadata(metadata) + if isinstance(dist, Mapping): + self.populate_metadata(dist) + return name = dist.get_name() if name: self.name = name - if dist.python_requires and not self.python_requires: - self.python_requires = packaging.specifiers.SpecifierSet( - dist.python_requires - ) - if not self._extras_requirements: - self._extras_requirements = () - if dist.extras_require and not self.extras: + update_dict = {} + if dist.python_requires: + update_dict["python_requires"] = dist.python_requires + update_dict["extras_require"] = {} + if dist.extras_require: for extra, extra_requires in dist.extras_require: - extras_tuple = tuple( - BaseRequirement.from_req(req) for req in extra_requires - ) - self._extras_requirements += ((extra, extras_tuple),) - install_requires = dist.get_requires() - if not install_requires: - install_requires = dist.install_requires - if install_requires and not self.requires: - requirements = set( - [BaseRequirement.from_req(req) for req in install_requires] + extras_tuple = make_base_requirements(extra_requires) + update_dict["extras_require"][extra] = extras_tuple + update_dict["install_requires"] = make_base_requirements( + dist.get_requires() + ) + if dist.setup_requires: + update_dict["setup_requires"] = make_base_requirements( + dist.setup_requires ) - if getattr(self.ireq, "extras", None): - for extra in self.ireq.extras: - requirements |= set(list(self.extras.get(extra, []))) - self._requirements = frozenset(set(self._requirements) | requirements) - if dist.setup_requires and not self.setup_requires: - self.setup_requires = tuple(dist.setup_requires) - if not self.version: - self.version = dist.get_version() + version = dist.get_version() + if version: + update_dict["version"] = version + return self.update_from_dict(update_dict) @property - @lru_cache() def pep517_config(self): config = {} config.setdefault("--global-option", []) @@ -733,7 +1063,12 @@ def build_wheel(self): def build_sdist(self): # type: () -> S if not self.pyproject.exists(): - build_requires = ", ".join(['"{0}"'.format(r) for r in self.build_requires]) + if not self.build_requires: + build_requires = '"setuptools", "wheel"' + else: + build_requires = ", ".join( + ['"{0}"'.format(r) for r in self.build_requires] + ) self.pyproject.write_text( u""" [build-system] @@ -751,30 +1086,38 @@ def build_sdist(self): ) def build(self): - # type: () -> None + # type: () -> "SetupInfo" dist_path = None try: dist_path = self.build_wheel() except Exception: try: dist_path = self.build_sdist() - self.get_egg_metadata(metadata_type="egg") + metadata = self.get_egg_metadata(metadata_type="egg") + if metadata: + self.populate_metadata(metadata) except Exception: pass else: - self.get_metadata_from_wheel( + metadata = self.get_metadata_from_wheel( os.path.join(self.extra_kwargs["build_dir"], dist_path) ) + if metadata: + self.populate_metadata(metadata) if not self.metadata or not self.name: - 
self.get_egg_metadata() + metadata = self.get_egg_metadata() + if metadata: + self.populate_metadata(metadata) if not self.metadata or not self.name: - self.run_setup() - return None + return self.run_setup() + return self def reload(self): # type: () -> Dict[S, Any] - """ - Wipe existing distribution info metadata for rebuilding. + """Wipe existing distribution info metadata for rebuilding. + + Erases metadata from **self.egg_base** and unsets **self.requirements** + and **self.extras**. """ for metadata_dir in os.listdir(self.egg_base): shutil.rmtree(metadata_dir, ignore_errors=True) @@ -785,15 +1128,27 @@ def reload(self): def get_metadata_from_wheel(self, wheel_path): # type: (S) -> Dict[Any, Any] + """Given a path to a wheel, return the metadata from that wheel. + + :return: A dictionary of metadata from the provided wheel + :rtype: Dict[Any, Any] + """ + metadata_dict = get_metadata_from_wheel(wheel_path) - if metadata_dict: - self.populate_metadata(metadata_dict) + return metadata_dict def get_egg_metadata(self, metadata_dir=None, metadata_type=None): - # type: (Optional[AnyStr], Optional[AnyStr]) -> None + # type: (Optional[AnyStr], Optional[AnyStr]) -> Dict[Any, Any] + """Given a metadata directory, return the corresponding metadata dictionary. + + :param Optional[str] metadata_dir: Root metadata path, default: `os.getcwd()` + :param Optional[str] metadata_type: Type of metadata to search for, default None + :return: A metadata dictionary built from the metadata in the given location + :rtype: Dict[Any, Any] + """ + package_indicators = [self.pyproject, self.setup_py, self.setup_cfg] - # if self.setup_py is not None and self.setup_py.exists(): - metadata_dirs = [] + metadata_dirs = [] # type: List[STRING_TYPE] if any([fn is not None and fn.exists() for fn in package_indicators]): metadata_dirs = [ self.extra_kwargs["build_dir"], @@ -805,14 +1160,19 @@ def get_egg_metadata(self, metadata_dir=None, metadata_type=None): metadata = [ get_metadata(d, pkg_name=self.name, metadata_type=metadata_type) for d in metadata_dirs - if os.path.exists(d) + if os.path.exists(fs_encode(d)) ] metadata = next(iter(d for d in metadata if d), None) - if metadata is not None: - self.populate_metadata(metadata) + return metadata def populate_metadata(self, metadata): - # type: (Dict[Any, Any]) -> None + # type: (Dict[Any, Any]) -> "SetupInfo" + """Populates the metadata dictionary from the supplied metadata. + + :return: The current instance. 
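With the changes above, `build()` threads every code path through `populate_metadata()`: try a PEP 517 wheel first, fall back to an sdist plus egg-info metadata, and only run `setup.py` as a last resort. Schematically, with stand-in callables rather than the real build steps:

    def build_with_fallbacks(strategies):
        # Return metadata from the first strategy that yields anything;
        # swallow failures the way build() does and move on.
        for strategy in strategies:
            try:
                metadata = strategy()
            except Exception:
                continue
            if metadata:
                return metadata
        return {}

    def build_wheel():  # pretend the wheel build produced nothing usable
        return {}

    def build_sdist():
        return {"name": "demo"}

    def run_setup_py():
        return {"name": "demo", "version": "1.0"}

    print(build_with_fallbacks([build_wheel, build_sdist, run_setup_py]))
    # {'name': 'demo'}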
+ :rtype: `SetupInfo` + """ + _metadata = () for k, v in metadata.items(): if k == "extras" and isinstance(v, dict): @@ -825,36 +1185,28 @@ def populate_metadata(self, metadata): else: _metadata += (k, v) self.metadata = _metadata - if self.name is None: - self.name = metadata.get("name", self.name) - if not self.version: - self.version = metadata.get("version", self.version) - self._requirements = frozenset( - set(self._requirements) - | set([BaseRequirement.from_req(req) for req in metadata.get("requires", [])]) - ) - if getattr(self.ireq, "extras", None): - for extra in self.ireq.extras: - extras = metadata.get("extras", {}).get(extra, []) - if extras: - extras_tuple = tuple( - [ - BaseRequirement.from_req(req) - for req in ensure_reqs(tuple(extras)) - if req is not None - ] - ) - self._extras_requirements += ((extra, extras_tuple),) - self._requirements = frozenset( - set(self._requirements) | set(extras_tuple) - ) + cleaned = metadata.copy() + cleaned.update({"install_requires": metadata.get("requires", [])}) + if cleaned: + self.update_from_dict(cleaned.copy()) + else: + self.update_from_dict(metadata) + return self def run_pyproject(self): - # type: () -> None + # type: () -> "SetupInfo" + """Populates the **pyproject.toml** metadata if available. + + :return: The current instance + :rtype: `SetupInfo` + """ + if self.pyproject and self.pyproject.exists(): result = get_pyproject(self.pyproject.parent) if result is not None: requires, backend = result + if self.build_requires is None: + self.build_requires = () if backend: self.build_backend = backend else: @@ -863,13 +1215,35 @@ def run_pyproject(self): self.build_requires = tuple(set(requires) | set(self.build_requires)) else: self.build_requires = ("setuptools", "wheel") + return self - def get_info(self): + def get_initial_info(self): # type: () -> Dict[S, Any] + parse_setupcfg = False + parse_setuppy = False if self.setup_cfg and self.setup_cfg.exists(): + parse_setupcfg = True + if self.setup_py and self.setup_py.exists(): + parse_setuppy = True + if parse_setuppy or parse_setupcfg: with cd(self.base_dir): - self.parse_setup_cfg() + if parse_setuppy: + self.update_from_dict(self.parse_setup_py()) + if parse_setupcfg: + self.update_from_dict(self.parse_setup_cfg()) + if self.name is not None and any( + [ + self.requires, + self.setup_requires, + self._extras_requirements, + self.build_backend, + ] + ): + return self.as_dict() + return self.get_info() + def get_info(self): + # type: () -> Dict[S, Any] with cd(self.base_dir): self.run_pyproject() self.build() @@ -881,26 +1255,30 @@ def get_info(self): self.run_setup() except Exception: with cd(self.base_dir): - self.get_egg_metadata() + metadata = self.get_egg_metadata() + if metadata: + self.populate_metadata(metadata) if self.metadata is None or not self.name: with cd(self.base_dir): - self.get_egg_metadata() + metadata = self.get_egg_metadata() + if metadata: + self.populate_metadata(metadata) return self.as_dict() def as_dict(self): - # type: () -> Dict[S, Any] + # type: () -> Dict[STRING_TYPE, Any] prop_dict = { "name": self.name, - "version": self.version, + "version": self.version if self._version else None, "base_dir": self.base_dir, "ireq": self.ireq, "build_backend": self.build_backend, "build_requires": self.build_requires, - "requires": self.requires, + "requires": self.requires if self._requirements else None, "setup_requires": self.setup_requires, "python_requires": self.python_requires, - "extras": self.extras, + "extras": self.extras if self._extras_requirements 
else None, "extra_kwargs": self.extra_kwargs, "setup_cfg": self.setup_cfg, "setup_py": self.setup_py, @@ -922,9 +1300,9 @@ def from_ireq(cls, ireq, subdir=None, finder=None): import pip_shims.shims if not ireq.link: - return + return None if ireq.link.is_wheel: - return + return None if not finder: from .dependencies import get_finder @@ -980,7 +1358,7 @@ def from_ireq(cls, ireq, subdir=None, finder=None): def create(cls, base_dir, subdirectory=None, ireq=None, kwargs=None): # type: (AnyStr, Optional[AnyStr], Optional[InstallRequirement], Optional[Dict[AnyStr, AnyStr]]) -> Optional[SetupInfo] if not base_dir or base_dir is None: - return + return None creation_kwargs = {"extra_kwargs": kwargs} if not isinstance(base_dir, Path): @@ -998,5 +1376,5 @@ def create(cls, base_dir, subdirectory=None, ireq=None, kwargs=None): if ireq: creation_kwargs["ireq"] = ireq created = cls(**creation_kwargs) - created.get_info() + created.get_initial_info() return created diff --git a/pipenv/vendor/requirementslib/models/url.py b/pipenv/vendor/requirementslib/models/url.py index 8a5337ff5a..889a4bdd87 100644 --- a/pipenv/vendor/requirementslib/models/url.py +++ b/pipenv/vendor/requirementslib/models/url.py @@ -203,6 +203,12 @@ def parse(cls, url): fragment = "" if parsed_dict["fragment"] is not None: fragment = "{0}".format(parsed_dict["fragment"]) + if fragment.startswith("egg="): + name, extras = pip_shims.shims._strip_extras(name_with_extras) + fragment_name, fragment_extras = pip_shims.shims._strip_extras(fragment) + if fragment_extras and not extras: + name_with_extras = "{0}{1}".format(name, fragment_extras) + fragment = "" elif "&subdirectory" in parsed_dict["path"]: path, fragment = cls.parse_subdirectory(parsed_dict["path"]) parsed_dict["path"] = path diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index 6a68d6dc6f..dd5afcbb9d 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -17,7 +17,10 @@ from packaging.markers import InvalidMarker, Marker, Op, Value, Variable from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet from packaging.version import parse as parse_version +from plette.models import Package, PackageCollection from six.moves.urllib import parse as urllib_parse +from tomlkit.container import Container +from tomlkit.items import AoT, Array, Bool, InlineTable, Item, String, Table from urllib3 import util as urllib3_util from vistir.compat import lru_cache from vistir.misc import dedup @@ -62,9 +65,13 @@ MarkerTuple = Tuple[TVariable, TOp, TValue] TRequirement = Union[PackagingRequirement, PkgResourcesRequirement] STRING_TYPE = Union[bytes, str, Text] + TOML_DICT_TYPES = Union[Container, Package, PackageCollection, Table, InlineTable] S = TypeVar("S", bytes, str, Text) +TOML_DICT_OBJECTS = (Container, Package, Table, InlineTable, PackageCollection) +TOML_DICT_NAMES = [o.__class__.__name__ for o in TOML_DICT_OBJECTS] + HASH_STRING = " --hash={0}" ALPHA_NUMERIC = r"[{0}{1}]".format(string.ascii_letters, string.digits) @@ -111,6 +118,60 @@ def create_link(link): return Link(link) +def tomlkit_value_to_python(toml_value): + # type: (Union[Array, AoT, TOML_DICT_TYPES, Item]) -> Union[List, Dict] + value_type = type(toml_value).__name__ + if ( + isinstance(toml_value, TOML_DICT_OBJECTS + (dict,)) + or value_type in TOML_DICT_NAMES + ): + return tomlkit_dict_to_python(toml_value) + elif isinstance(toml_value, AoT) or value_type == "AoT": + return 
[tomlkit_value_to_python(val) for val in toml_value._body] + elif isinstance(toml_value, Array) or value_type == "Array": + return [tomlkit_value_to_python(val) for val in list(toml_value)] + elif isinstance(toml_value, String) or value_type == "String": + return "{0!s}".format(toml_value) + elif isinstance(toml_value, Bool) or value_type == "Bool": + return toml_value.value + elif isinstance(toml_value, Item): + return toml_value.value + return toml_value + + +def tomlkit_dict_to_python(toml_dict): + # type: (TOML_DICT_TYPES) -> Dict + value_type = type(toml_dict).__name__ + if toml_dict is None: + raise TypeError("Invalid type NoneType when converting toml dict to python") + converted = None # type: Optional[Dict] + if isinstance(toml_dict, (InlineTable, Table)) or value_type in ( + "InlineTable", + "Table", + ): + converted = toml_dict.value + elif isinstance(toml_dict, (Package, PackageCollection)) or value_type in ( + "Package, PackageCollection" + ): + converted = toml_dict._data + if isinstance(converted, Container) or type(converted).__name__ == "Container": + converted = converted.value + elif isinstance(toml_dict, Container) or value_type == "Container": + converted = toml_dict.value + elif isinstance(toml_dict, dict): + converted = toml_dict.copy() + else: + raise TypeError( + "Invalid type for conversion: expected Container, Dict, or Table, " + "got {0!r}".format(toml_dict) + ) + if isinstance(converted, dict): + return {k: tomlkit_value_to_python(v) for k, v in converted.items()} + elif isinstance(converted, (TOML_DICT_OBJECTS)) or value_type in TOML_DICT_NAMES: + return tomlkit_dict_to_python(converted) + return converted + + def get_url_name(url): # type: (AnyStr) -> AnyStr """ @@ -142,7 +203,12 @@ def init_requirement(name): def extras_to_string(extras): # type: (Iterable[S]) -> S - """Turn a list of extras into a string""" + """Turn a list of extras into a string + + :param List[str]] extras: a list of extras to format + :return: A string of extras + :rtype: str + """ if isinstance(extras, six.string_types): if extras.startswith("["): return extras @@ -155,8 +221,11 @@ def extras_to_string(extras): def parse_extras(extras_str): # type: (AnyStr) -> List[AnyStr] - """ - Turn a string of extras into a parsed extras list + """Turn a string of extras into a parsed extras list + + :param str extras_str: An extras string + :return: A sorted list of extras + :rtype: List[str] """ from pkg_resources import Requirement @@ -167,8 +236,11 @@ def parse_extras(extras_str): def specs_to_string(specs): # type: (List[Union[STRING_TYPE, Specifier]]) -> AnyStr - """ - Turn a list of specifier tuples into a string + """Turn a list of specifier tuples into a string + + :param List[Union[Specifier, str]] specs: a list of specifiers to format + :return: A string of specifiers + :rtype: str """ if specs: @@ -212,7 +284,8 @@ def build_vcs_uri( def convert_direct_url_to_url(direct_url): # type: (AnyStr) -> AnyStr - """ + """Converts direct URLs to standard, link-style URLs + Given a direct url as defined by *PEP 508*, convert to a :class:`~pip_shims.shims.Link` compatible URL by moving the name and extras into an **egg_fragment**. @@ -253,6 +326,8 @@ def convert_direct_url_to_url(direct_url): def convert_url_to_direct_url(url, name=None): # type: (AnyStr, Optional[AnyStr]) -> AnyStr """ + Converts normal link-style URLs to direct urls. + Given a :class:`~pip_shims.shims.Link` compatible URL, convert to a direct url as defined by *PEP 508* by extracting the name and extras from the **egg_fragment**. 
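The two converters documented above shuttle the name and extras between a PEP 508 direct reference (`name[extra] @ uri`) and pip's egg-fragment form (`uri#egg=name[extra]`). A toy round-trip showing the shape of the transformation; the real helpers also normalize VCS/ssh schemes, extras on the fragment, and `&subdirectory` markers:

    def to_egg_fragment_url(direct_url):
        # "requests[socks] @ git+https://github.com/psf/requests.git"
        #   -> "git+https://github.com/psf/requests.git#egg=requests[socks]"
        name, _, uri = direct_url.partition("@")
        return "{0}#egg={1}".format(uri.strip(), name.strip())

    def to_direct_url(url):
        # Inverse: move the egg fragment back in front of the URI.
        uri, _, name = url.partition("#egg=")
        return "{0} @ {1}".format(name, uri)

    url = to_egg_fragment_url("requests[socks] @ git+https://github.com/psf/requests.git")
    print(url)
    print(to_direct_url(url))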
@@ -303,7 +378,7 @@ def get_version(pipfile_entry): if str(pipfile_entry) == "{}" or is_star(pipfile_entry): return "" - elif hasattr(pipfile_entry, "keys") and "version" in pipfile_entry: + if hasattr(pipfile_entry, "keys") and "version" in pipfile_entry: if is_star(pipfile_entry.get("version")): return "" return pipfile_entry.get("version", "").strip().lstrip("(").rstrip(")") @@ -316,6 +391,8 @@ def get_version(pipfile_entry): def strip_extras_markers_from_requirement(req): # type: (TRequirement) -> TRequirement """ + Strips extras markers from requirement instances. + Given a :class:`~packaging.requirements.Requirement` instance with markers defining *extra == 'name'*, strip out the extras from the markers and return the cleaned requirement @@ -389,7 +466,6 @@ def get_pyproject(path): :return: A 2 tuple of build requirements and the build backend :rtype: Optional[Tuple[List[AnyStr], AnyStr]] """ - if not path: return from vistir.compat import Path @@ -519,8 +595,7 @@ def key_from_req(req): def _requirement_to_str_lowercase_name(requirement): - """ - Formats a packaging.requirements.Requirement with a lowercase name. + """Formats a packaging.requirements.Requirement with a lowercase name. This is simply a copy of https://github.com/pypa/packaging/blob/16.8/packaging/requirements.py#L109-L124 @@ -531,7 +606,6 @@ def _requirement_to_str_lowercase_name(requirement): important stuff that should not be lower-cased (such as the marker). See this issue for more information: https://github.com/pypa/pipenv/issues/2113. """ - parts = [requirement.name.lower()] if requirement.extras: @@ -550,11 +624,15 @@ def _requirement_to_str_lowercase_name(requirement): def format_requirement(ireq): - """ + """Formats an `InstallRequirement` instance as a string. + Generic formatter for pretty printing InstallRequirements to the terminal in a less verbose way than using its `__str__` method. - """ + :param :class:`InstallRequirement` ireq: A pip **InstallRequirement** instance. + :return: A formatted string for prettyprinting + :rtype: str + """ if ireq.editable: line = "-e {}".format(ireq.link) else: @@ -572,9 +650,13 @@ def format_requirement(ireq): def format_specifier(ireq): - """ - Generic formatter for pretty printing the specifier part of - InstallRequirements to the terminal. + """Generic formatter for pretty printing specifiers. + + Pretty-prints specifiers from InstallRequirements for output to terminal. + + :param :class:`InstallRequirement` ireq: A pip **InstallRequirement** instance. + :return: A string of specifiers in the given install requirement or + :rtype: str """ # TODO: Ideally, this is carried over to the pip library itself specs = ireq.specifier._specs if ireq.req is not None else [] @@ -583,8 +665,7 @@ def format_specifier(ireq): def get_pinned_version(ireq): - """ - Get the pinned version of an InstallRequirement. + """Get the pinned version of an InstallRequirement. An InstallRequirement is considered pinned if: @@ -602,7 +683,6 @@ def get_pinned_version(ireq): Raises `TypeError` if the input is not a valid InstallRequirement, or `ValueError` if the InstallRequirement is not pinned. 
""" - try: specifier = ireq.specifier except AttributeError: diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index 515f9a8842..7650d764a4 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -132,7 +132,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): # type: (S) -> S - """Cleans VCS uris from pipenv.patched.notpip format""" + """Cleans VCS uris from pip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: @@ -169,14 +169,6 @@ def is_editable(pipfile_entry): return False -def multi_split(s, split): - # type: (S, Iterable[S]) -> List[S] - """Splits on multiple given separators.""" - for r in split: - s = s.replace(r, "|") - return [i for i in s.split("|") if len(i) > 0] - - def is_star(val): # type: (PipfileType) -> bool return (isinstance(val, six.string_types) and val == "*") or ( @@ -318,30 +310,6 @@ def _ensure_dir(path): return path -@contextlib.contextmanager -def ensure_setup_py(base): - # type: (STRING_TYPE) -> Generator[None, None, None] - if not base: - base = create_tracked_tempdir(prefix="requirementslib-setup") - base_dir = Path(base) - if base_dir.exists() and base_dir.name == "setup.py": - base_dir = base_dir.parent - elif not (base_dir.exists() and base_dir.is_dir()): - base_dir = base_dir.parent - if not (base_dir.exists() and base_dir.is_dir()): - base_dir = base_dir.parent - setup_py = base_dir.joinpath("setup.py") - - is_new = False if setup_py.exists() else True - if not setup_py.exists(): - setup_py.write_text(u"") - try: - yield - finally: - if is_new: - setup_py.unlink() - - _UNSET = object() _REMAP_EXIT = object() diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index 5be3b74752..aa7831a538 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -13,6 +13,7 @@ cd, open_file, replaced_stream, + replaced_streams, spinner, temp_environ, temp_path, @@ -35,7 +36,7 @@ from .path import create_tracked_tempdir, create_tracked_tempfile, mkdir_p, rmtree from .spin import create_spinner -__version__ = "0.3.1" +__version__ = "0.4.0" __all__ = [ @@ -68,6 +69,7 @@ "get_wrapped_stream", "StreamWrapper", "replaced_stream", + "replaced_streams", "show_cursor", "hide_cursor", ] diff --git a/pipenv/vendor/vistir/_winconsole.py b/pipenv/vendor/vistir/_winconsole.py new file mode 100644 index 0000000000..8f176ddf85 --- /dev/null +++ b/pipenv/vendor/vistir/_winconsole.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- + +# This Module is taken in full from the click project +# see https://github.com/pallets/click/blob/6cafd32/click/_winconsole.py +# Copyright © 2014 by the Pallets team. + +# Some rights reserved. + +# Redistribution and use in source and binary forms of the software as well as +# documentation, with or without modification, are permitted provided that the +# following conditions are met: +# Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+# Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this +# software without specific prior written permission. + +# THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT +# NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE AND +# DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prmopt. + +import io +import os +import sys +import zlib +import time +import ctypes +import msvcrt +from ctypes import ( + byref, + POINTER, + c_int, + c_char, + c_char_p, + c_void_p, + c_ssize_t, + c_ulong, + py_object, + Structure, + windll, + WINFUNCTYPE, +) +from ctypes.wintypes import LPWSTR, LPCWSTR +from six import PY2, text_type +from .misc import StreamWrapper + +try: + from ctypes import pythonapi + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release +except ImportError: + pythonapi = None + + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetLastError = kernel32.GetLastError +GetConsoleCursorInfo = kernel32.GetConsoleCursorInfo +SetConsoleCursorInfo = kernel32.SetConsoleCursorInfo +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) + + +# XXX: Added for cursor hiding on windows +STDOUT_HANDLE_ID = ctypes.c_ulong(-11) +STDERR_HANDLE_ID = ctypes.c_ulong(-12) +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +STREAM_MAP = {0: STDIN_HANDLE, 1: STDOUT_HANDLE, 2: STDERR_HANDLE} + + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + + +class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + if PY2: + _fields_.insert(-1, ("smalltable", c_ssize_t * 2)) + + +# XXX: This was added for the use of cursors +class CONSOLE_CURSOR_INFO(Structure): + _fields_ = [("dwSize", ctypes.c_int), ("bVisible", 
ctypes.c_int)]
+
+
+# On PyPy we cannot get buffers so our ability to operate here is
+# severely limited.
+if pythonapi is None:
+    get_buffer = None
+else:
+
+    def get_buffer(obj, writable=False):
+        buf = Py_buffer()
+        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+        PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+        try:
+            buffer_type = c_char * buf.len
+            return buffer_type.from_address(buf.buf)
+        finally:
+            PyBuffer_Release(byref(buf))
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+    def __init__(self, handle):
+        self.handle = handle
+
+    def isatty(self):
+        io.RawIOBase.isatty(self)
+        return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+    def readable(self):
+        return True
+
+    def readinto(self, b):
+        bytes_to_be_read = len(b)
+        if not bytes_to_be_read:
+            return 0
+        elif bytes_to_be_read % 2:
+            raise ValueError(
+                "cannot read odd number of bytes from " "UTF-16-LE encoded console"
+            )
+
+        buffer = get_buffer(b, writable=True)
+        code_units_to_be_read = bytes_to_be_read // 2
+        code_units_read = c_ulong()
+
+        rv = ReadConsoleW(
+            self.handle, buffer, code_units_to_be_read, byref(code_units_read), None
+        )
+        if GetLastError() == ERROR_OPERATION_ABORTED:
+            # wait for KeyboardInterrupt
+            time.sleep(0.1)
+        if not rv:
+            raise OSError("Windows error: %s" % GetLastError())
+
+        if buffer[0] == EOF:
+            return 0
+        return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+    def writable(self):
+        return True
+
+    @staticmethod
+    def _get_error_message(errno):
+        if errno == ERROR_SUCCESS:
+            return "ERROR_SUCCESS"
+        elif errno == ERROR_NOT_ENOUGH_MEMORY:
+            return "ERROR_NOT_ENOUGH_MEMORY"
+        return "Windows error %s" % errno
+
+    def write(self, b):
+        bytes_to_be_written = len(b)
+        buf = get_buffer(b)
+        code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+        code_units_written = c_ulong()
+
+        WriteConsoleW(
+            self.handle, buf, code_units_to_be_written, byref(code_units_written), None
+        )
+        bytes_written = 2 * code_units_written.value
+
+        if bytes_written == 0 and bytes_to_be_written > 0:
+            raise OSError(self._get_error_message(GetLastError()))
+        return bytes_written
+
+
+class ConsoleStream(object):
+    def __init__(self, text_stream, byte_stream):
+        self._text_stream = text_stream
+        self.buffer = byte_stream
+
+    @property
+    def name(self):
+        return self.buffer.name
+
+    def write(self, x):
+        if isinstance(x, text_type):
+            return self._text_stream.write(x)
+        try:
+            self.flush()
+        except Exception:
+            pass
+        return self.buffer.write(x)
+
+    def writelines(self, lines):
+        for line in lines:
+            self.write(line)
+
+    def __getattr__(self, name):
+        try:
+            return getattr(self._text_stream, name)
+        except io.UnsupportedOperation:
+            return getattr(self.buffer, name)
+
+    def isatty(self):
+        return self.buffer.isatty()
+
+    def __repr__(self):
+        return "<ConsoleStream name=%r encoding=%r>" % (self.name, self.encoding)
+
+
+class WindowsChunkedWriter(object):
+    """
+    Wraps a stream (such as stdout), acting as a transparent proxy for all
+    attribute access apart from method 'write()' which we wrap to write in
+    limited chunks due to a Windows limitation on binary console streams.
+    """
+
+    def __init__(self, wrapped):
+        # double-underscore everything to prevent clashes with names of
+        # attributes on the wrapped stream object.
+ self.__wrapped = wrapped + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + total_to_write = len(text) + written = 0 + + while written < total_to_write: + to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) + self.__wrapped.write(text[written : written + to_write]) + written += to_write + + +_wrapped_std_streams = set() + + +def _wrap_std_stream(name): + # Python 2 & Windows 7 and below + if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams: + setattr(sys, name, WindowsChunkedWriter(getattr(sys, name))) + _wrapped_std_streams.add(name) + + +def _get_text_stdin(buffer_stream): + text_stream = StreamWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stdout(buffer_stream): + text_stream = StreamWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stderr(buffer_stream): + text_stream = StreamWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return ConsoleStream(text_stream, buffer_stream) + + +if PY2: + + def _hash_py_argv(): + return zlib.crc32("\x00".join(sys.argv[1:])) + + _initial_argv_hash = _hash_py_argv() + + def _get_windows_argv(): + argc = c_int(0) + argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) + argv = [argv_unicode[i] for i in range(0, argc.value)] + + if not hasattr(sys, "frozen"): + argv = argv[1:] + while len(argv) > 0: + arg = argv[0] + if not arg.startswith("-") or arg == "-": + break + argv = argv[1:] + if arg.startswith(("-c", "-m")): + break + + return argv[1:] + + +_stream_factories = {0: _get_text_stdin, 1: _get_text_stdout, 2: _get_text_stderr} + + +def _get_windows_console_stream(f, encoding, errors): + if ( + get_buffer is not None + and encoding in ("utf-16-le", None) + and errors in ("strict", None) + and hasattr(f, "isatty") + and f.isatty() + ): + if isinstance(f, ConsoleStream): + return f + func = _stream_factories.get(f.fileno()) + if func is not None: + if not PY2: + f = getattr(f, "buffer", None) + if f is None: + return None + else: + # If we are on Python 2 we need to set the stream that we + # deal with to binary mode as otherwise the exercise if a + # bit moot. The same problems apply as for + # get_binary_stdin and friends from _compat. 
+                msvcrt.setmode(f.fileno(), os.O_BINARY)
+            return func(f)
+
+
+def hide_cursor():
+    cursor_info = CONSOLE_CURSOR_INFO()
+    GetConsoleCursorInfo(STDOUT_HANDLE, ctypes.byref(cursor_info))
+    cursor_info.visible = False
+    SetConsoleCursorInfo(STDOUT_HANDLE, ctypes.byref(cursor_info))
+
+
+def show_cursor():
+    cursor_info = CONSOLE_CURSOR_INFO()
+    GetConsoleCursorInfo(STDOUT_HANDLE, ctypes.byref(cursor_info))
+    cursor_info.visible = True
+    SetConsoleCursorInfo(STDOUT_HANDLE, ctypes.byref(cursor_info))
+
+
+def get_stream_handle(stream):
+    return STREAM_MAP.get(stream.fileno())
diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py
index f0266e304b..6c683747a4 100644
--- a/pipenv/vendor/vistir/compat.py
+++ b/pipenv/vendor/vistir/compat.py
@@ -42,33 +42,28 @@

 if sys.version_info >= (3, 5):
     from pathlib import Path
-    from functools import lru_cache
 else:
     from pipenv.vendor.pathlib2 import Path
-    from pipenv.vendor.backports.functools_lru_cache import lru_cache
-
-
-if sys.version_info < (3, 3):
-    from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size

-    NamedTemporaryFile = _NamedTemporaryFile
-else:
+if six.PY3:
+    # Only Python 3.4+ is supported
+    from functools import lru_cache, partialmethod
     from tempfile import NamedTemporaryFile
     from shutil import get_terminal_size
-
-try:
     from weakref import finalize
-except ImportError:
-    from pipenv.vendor.backports.weakref import finalize  # type: ignore
-
-try:
-    from functools import partialmethod
-except Exception:
+else:
+    # Only Python 2.7 is supported
+    from pipenv.vendor.backports.functools_lru_cache import lru_cache
     from .backports.functools import partialmethod  # type: ignore
+    from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size
+
+    NamedTemporaryFile = _NamedTemporaryFile
+    from pipenv.vendor.backports.weakref import finalize  # type: ignore

 try:
+    # Introduced in Python 3.5
     from json import JSONDecodeError
-except ImportError:  # Old Pythons.
+except ImportError:
     JSONDecodeError = ValueError  # type: ignore

 if six.PY2:
@@ -205,6 +200,20 @@ def cleanup(self):
         self._rmtree(self.name)


+def is_bytes(string):
+    """Check if a string is a bytes instance
+
+    :param Union[str, bytes] string: A string that may be a str or a bytes-like object
+    :return: Whether the provided string is a bytes type or not
+    :rtype: bool
+    """
+    if six.PY3 and isinstance(string, (bytes, memoryview, bytearray)):  # noqa
+        return True
+    elif six.PY2 and isinstance(string, (buffer, bytearray)):  # noqa
+        return True
+    return False
+
+
 def fs_str(string):
     """Encodes a string into the proper filesystem encoding

diff --git a/pipenv/vendor/vistir/contextmanagers.py b/pipenv/vendor/vistir/contextmanagers.py
index d9223b6682..49ec964fe1 100644
--- a/pipenv/vendor/vistir/contextmanagers.py
+++ b/pipenv/vendor/vistir/contextmanagers.py
@@ -21,6 +21,7 @@
     "spinner",
     "dummy_spinner",
     "replaced_stream",
+    "replaced_streams",
 ]


@@ -316,6 +317,7 @@ def replaced_stream(stream_name):
     >>> sys.stdout.write("hello")
     'hello'
     """
+
     orig_stream = getattr(sys, stream_name)
     new_stream = six.StringIO()
     try:
diff --git a/pipenv/vendor/vistir/cursor.py b/pipenv/vendor/vistir/cursor.py
index 22d643e13a..bdb281f6d3 100644
--- a/pipenv/vendor/vistir/cursor.py
+++ b/pipenv/vendor/vistir/cursor.py
@@ -1,19 +1,10 @@
 # -*- coding=utf-8 -*-
 from __future__ import absolute_import, print_function

-import ctypes
 import os
 import sys

-__all__ = ["hide_cursor", "show_cursor"]
-
-
-class CONSOLE_CURSOR_INFO(ctypes.Structure):
-    _fields_ = [("dwSize", ctypes.c_int), ("bVisible", ctypes.c_int)]
-
-
-WIN_STDERR_HANDLE_ID = ctypes.c_ulong(-12)
-WIN_STDOUT_HANDLE_ID = ctypes.c_ulong(-11)
+__all__ = ["hide_cursor", "show_cursor", "get_stream_handle"]


 def get_stream_handle(stream=sys.stdout):
@@ -26,10 +17,9 @@ def get_stream_handle(stream=sys.stdout):
     """
     handle = stream
     if os.name == "nt":
-        from ctypes import windll
+        from ._winconsole import get_stream_handle as get_win_stream_handle

-        handle_id = WIN_STDOUT_HANDLE_ID
-        handle = windll.kernel32.GetStdHandle(handle_id)
+        return get_win_stream_handle(stream)
     return handle


@@ -44,12 +34,9 @@ def hide_cursor(stream=sys.stdout):
     handle = get_stream_handle(stream=stream)
     if os.name == "nt":
-        from ctypes import windll
+        from ._winconsole import hide_cursor

-        cursor_info = CONSOLE_CURSOR_INFO()
-        windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(cursor_info))
-        cursor_info.visible = False
-        windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(cursor_info))
+        hide_cursor()
     else:
         handle.write("\033[?25l")
         handle.flush()
@@ -66,12 +53,9 @@ def show_cursor(stream=sys.stdout):
     handle = get_stream_handle(stream=stream)
     if os.name == "nt":
-        from ctypes import windll
+        from ._winconsole import show_cursor

-        cursor_info = CONSOLE_CURSOR_INFO()
-        windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(cursor_info))
-        cursor_info.visible = True
-        windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(cursor_info))
+        show_cursor()
     else:
         handle.write("\033[?25h")
         handle.flush()
diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py
index fe88dc1fd1..63f7dc5bfa 100644
--- a/pipenv/vendor/vistir/misc.py
+++ b/pipenv/vendor/vistir/misc.py
@@ -11,12 +11,22 @@
 from collections import OrderedDict
 from functools import partial
 from itertools import islice, tee
+from weakref import WeakKeyDictionary

 import six

 from .cmdparse import Script
-from .compat import Iterable, Path, StringIO, fs_str, partialmethod, to_native_string
+from .compat import (
+    Iterable,
+    Path,
+    StringIO,
+    fs_str,
+    is_bytes,
+    partialmethod,
+    to_native_string,
+)
 from .contextmanagers import spinner as spinner
+from .termcolors import ANSI_REMOVAL_RE, colorize


 if os.name != "nt":
@@ -514,7 +524,7 @@ def chunked(n, iterable):


 try:
-    locale_encoding = locale.getdefaultencoding()[1] or "ascii"
+    locale_encoding = locale.getdefaultlocale()[1] or "ascii"
 except Exception:
     locale_encoding = "ascii"

@@ -617,20 +627,47 @@ def get_canonical_encoding_name(name):
     return codec.name


-def get_wrapped_stream(stream):
+def _is_binary_buffer(stream):
+    try:
+        stream.write(b"")
+    except Exception:
+        try:
+            stream.write("")
+        except Exception:
+            pass
+        return False
+    return True
+
+
+def _get_binary_buffer(stream):
+    if six.PY3 and not _is_binary_buffer(stream):
+        stream = getattr(stream, "buffer", None)
+        if stream is not None and _is_binary_buffer(stream):
+            return stream
+    return stream
+
+
+def get_wrapped_stream(stream, encoding=None, errors="replace"):
     """
     Given a stream, wrap it in a `StreamWrapper` instance and return the wrapped stream.

     :param stream: A stream instance to wrap
+    :param str encoding: The encoding to use for the stream
+    :param str errors: The error handler to use, default "replace"
     :returns: A new, wrapped stream
     :rtype: :class:`StreamWrapper`
     """
     if stream is None:
         raise TypeError("must provide a stream to wrap")
-    encoding = getattr(stream, "encoding", None)
-    encoding = get_output_encoding(encoding)
-    return StreamWrapper(stream, encoding, "replace", line_buffering=True)
+    stream = _get_binary_buffer(stream)
+    if stream is not None and encoding is None:
+        encoding = "utf-8"
+    if not encoding:
+        encoding = get_output_encoding(stream)
+    else:
+        encoding = get_canonical_encoding_name(encoding)
+    return StreamWrapper(stream, encoding, errors, line_buffering=True)


 class StreamWrapper(io.TextIOWrapper):
@@ -656,9 +693,26 @@ def write(self, x):
                 self.flush()
             except Exception:
                 pass
+                # This is modified from the initial implementation to rely on
+                # our own decoding functionality to preserve unicode strings where
+                # possible
             return self.buffer.write(str(x))
         return io.TextIOWrapper.write(self, x)

+    else:
+
+        def write(self, x):
+            # try to use backslash and surrogate escape strategies before failing
+            old_errors = getattr(self, "_errors", self.errors)
+            self._errors = (
+                "backslashreplace" if self.encoding != "mbcs" else "surrogateescape"
+            )
+            try:
+                return io.TextIOWrapper.write(self, to_text(x, errors=self._errors))
+            except UnicodeDecodeError:
+                self._errors = old_errors
+                return io.TextIOWrapper.write(self, to_text(x, errors=self._errors))
+
     def writelines(self, lines):
         for line in lines:
             self.write(line)
@@ -720,3 +774,201 @@ def seekable(self):
         except Exception:
             return False
         return True
+
+
+# XXX: The approach here is inspired somewhat by click, with details taken from various
+# XXX: other sources.  Specifically we are using a stream cache and stream wrapping
+# XXX: techniques from click (loosely inspired for the most part, with many details
+# XXX: heavily modified to suit our needs)
+
+
+def _isatty(stream):
+    try:
+        is_a_tty = stream.isatty()
+    except Exception:
+        is_a_tty = False
+    return is_a_tty
+
+
+_wrap_for_color = None
+
+try:
+    import colorama
+except ImportError:
+    colorama = None
+
+_color_stream_cache = WeakKeyDictionary()
+
+if os.name == "nt" or sys.platform.startswith("win"):
+
+    def _wrap_for_color(stream, allow_color=True):
+        if colorama is not None:
+            try:
+                cached = _color_stream_cache.get(stream)
+            except KeyError:
+                cached = None
+            if cached is not None:
+                return cached
+            if not _isatty(stream):
+                allow_color = False
+            _color_wrapper = colorama.AnsiToWin32(stream, strip=not allow_color)
+            result = _color_wrapper.stream
+            _write = result.write
+
+            def _write_with_color(s):
+                try:
+                    return _write(s)
+                except Exception:
+                    _color_wrapper.reset_all()
+                    raise
+
+            result.write = _write_with_color
+            try:
+                _color_stream_cache[stream] = result
+            except Exception:
+                pass
+            return result
+
+        return stream
+
+
+def _cached_stream_lookup(stream_lookup_func, stream_resolution_func):
+    stream_cache = WeakKeyDictionary()
+
+    def lookup():
+        stream = stream_lookup_func()
+        result = None
+        if stream in stream_cache:
+            result = stream_cache.get(stream, None)
+        if result is not None:
+            return result
+        result = stream_resolution_func()
+        try:
+            stream = stream_lookup_func()
+            stream_cache[stream] = result
+        except Exception:
+            pass
+        return result
+
+    return lookup
+
+
+def get_text_stream(stream="stdout", encoding=None, allow_color=True):
+    """Retrieve a unicode stream wrapper around **sys.stdin**, **sys.stdout** or **sys.stderr**.
+
+    :param str stream: The name of the stream to wrap from the :mod:`sys` module.
+    :param str encoding: An optional encoding to use.
+ :return: A new :class:`~vistir.misc.StreamWrapper` instance around the stream + :rtype: `vistir.misc.StreamWrapper` + """ + + stream_map = {"stdin": sys.stdin, "stdout": sys.stdout, "stderr": sys.stderr} + if os.name == "nt" or sys.platform.startswith("win"): + from ._winconsole import _get_windows_console_stream, _wrap_std_stream + + else: + _get_windows_console_stream = lambda *args: None # noqa + _wrap_std_stream = lambda *args: None # noqa + + if six.PY2 and stream != "stdin": + _wrap_std_stream(stream) + sys_stream = stream_map[stream] + windows_console = _get_windows_console_stream(sys_stream, encoding, None) + if windows_console is not None: + return windows_console + return get_wrapped_stream(sys_stream, encoding) + + +def get_text_stdout(): + return get_text_stream("stdout") + + +def get_text_stderr(): + return get_text_stream("stderr") + + +def get_text_stdin(): + return get_text_stream("stdin") + + +TEXT_STREAMS = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} + + +_text_stdin = _cached_stream_lookup(lambda: sys.stdin, get_text_stdin) +_text_stdout = _cached_stream_lookup(lambda: sys.stdout, get_text_stdout) +_text_stderr = _cached_stream_lookup(lambda: sys.stderr, get_text_stderr) + + +def replace_with_text_stream(stream_name): + """Given a stream name, replace the target stream with a text-converted equivalent + + :param str stream_name: The name of a target stream, such as **stdout** or **stderr** + :return: None + """ + new_stream = TEXT_STREAMS.get(stream_name) + if new_stream is not None: + new_stream = new_stream() + setattr(sys, stream_name, new_stream) + return None + + +def _can_use_color(stream=None, fg=None, bg=None, style=None): + if not any([fg, bg, style]): + if not stream: + stream = sys.stdin + return _isatty(stream) + return any([fg, bg, style]) + + +def echo(text, fg=None, bg=None, style=None, file=None, err=False): + """Write the given text to the provided stream or **sys.stdout** by default. + + Provides optional foreground and background colors from the ansi defaults: + **grey**, **red**, **green**, **yellow**, **blue**, **magenta**, **cyan** + or **white**. 
+
+    Available styles include **bold**, **dark**, **underline**, **blink**, **reverse**,
+    **concealed**
+
+    :param str text: Text to write
+    :param str fg: Foreground color to use (default: None)
+    :param str bg: Background color to use (default: None)
+    :param str style: Style to use (default: None)
+    :param stream file: File to write to (default: None)
+    :param bool err: Whether to write to **sys.stderr** instead of **sys.stdout** (default: False)
+    """
+
+    if file and not hasattr(file, "write"):
+        raise TypeError("Expected a writable stream, received {0!r}".format(file))
+    if not file:
+        if err:
+            file = _text_stderr()
+        else:
+            file = _text_stdout()
+    if text and not isinstance(text, (six.string_types, bytes, bytearray)):
+        text = six.text_type(text)
+    text = "" if not text else text
+    if isinstance(text, six.text_type):
+        text += "\n"
+    else:
+        text += b"\n"
+    if text and six.PY3 and is_bytes(text):
+        buffer = _get_binary_buffer(file)
+        if buffer is not None:
+            file.flush()
+            buffer.write(text)
+            buffer.flush()
+            return
+    if text and not is_bytes(text):
+        can_use_color = _can_use_color(file, fg=fg, bg=bg, style=style)
+        if os.name == "nt":
+            text = colorize(text, fg=fg, bg=bg, attrs=style)
+            file = _wrap_for_color(file, allow_color=can_use_color)
+        elif not can_use_color:
+            text = ANSI_REMOVAL_RE.sub("", text)
+    if text:
+        file.write(text)
+    file.flush()
diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py
index e9370c8dec..71d36f1c6a 100644
--- a/pipenv/vendor/vistir/path.py
+++ b/pipenv/vendor/vistir/path.py
@@ -33,7 +33,6 @@
 if IS_TYPE_CHECKING:
     from typing import Optional, Callable, Text, ByteString, AnyStr

-
 __all__ = [
     "check_for_unc_path",
     "get_converted_relative_path",
@@ -423,16 +422,17 @@ def handle_remove_readonly(func, path, exc):
     try:
         func(path)
     except (OSError, IOError, FileNotFoundError, PermissionError) as e:
-        if e.errno == errno.ENOENT:
-            return
-        elif e.errno in PERM_ERRORS:
+        if e.errno in PERM_ERRORS:
+            if e.errno == errno.ENOENT:
+                return
             remaining = None
             if os.path.isdir(path):
-                remaining =_wait_for_files(path)
+                remaining = _wait_for_files(path)
             if remaining:
                 warnings.warn(default_warning_message.format(path), ResourceWarning)
+            else:
+                func(path, ignore_errors=True)
             return
-        raise

     if exc_exception.errno in PERM_ERRORS:
         set_write_bit(path)
@@ -441,16 +441,9 @@ def handle_remove_readonly(func, path, exc):
             func(path)
         except (OSError, IOError, FileNotFoundError, PermissionError) as e:
             if e.errno in PERM_ERRORS:
-                warnings.warn(default_warning_message.format(path), ResourceWarning)
-                pass
-            elif e.errno == errno.ENOENT:  # File already gone
-                pass
-            else:
-                raise
-        else:
+                if e.errno != errno.ENOENT:  # File still exists
+                    warnings.warn(default_warning_message.format(path), ResourceWarning)
             return
-    elif exc_exception.errno == errno.ENOENT:
-        pass
     else:
         raise exc_exception
diff --git a/pipenv/vendor/vistir/termcolors.py b/pipenv/vendor/vistir/termcolors.py
index 6aecec887b..27b5ff4459 100644
--- a/pipenv/vendor/vistir/termcolors.py
+++ b/pipenv/vendor/vistir/termcolors.py
@@ -2,8 +2,10 @@
 from __future__ import absolute_import, print_function, unicode_literals

 import os
+import re

 import colorama
+import six

 from .compat import to_native_string

@@ -12,44 +14,14 @@
 )


-ATTRIBUTES = dict(
-    list(
-        zip(
-            ["bold", "dark", "", "underline", "blink", "", "reverse", "concealed"],
-            list(range(1, 9)),
-        )
-    )
-)
+ATTRIBUTE_NAMES = ["bold", "dark", "", "underline", "blink", "", "reverse", "concealed"]
+ATTRIBUTES = dict(zip(ATTRIBUTE_NAMES, range(1, 9)))
 del ATTRIBUTES[""]

-
-HIGHLIGHTS = dict(
-    list(
-        zip(
-            [
-                "on_grey",
-                "on_red",
-                "on_green",
-                "on_yellow",
-                "on_blue",
-                "on_magenta",
-                "on_cyan",
-                "on_white",
-            ],
-            list(range(40, 48)),
-        )
-    )
-)
-
-
-COLORS = dict(
-    list(
-        zip(
-            ["grey", "red", "green", "yellow", "blue", "magenta", "cyan", "white"],
-            list(range(30, 38)),
-        )
-    )
-)
+colors = ["grey", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]
+COLORS = dict(zip(colors, range(30, 38)))
+HIGHLIGHTS = dict(zip(["on_{0}".format(c) for c in colors], range(40, 48)))
+ANSI_REMOVAL_RE = re.compile(r"\033\[((?:\d|;)*)([a-zA-Z])")


 COLOR_MAP = {
@@ -99,25 +71,36 @@ def colored(text, color=None, on_color=None, attrs=None):
     colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink'])
     colored('Hello, World!', 'green')
     """
+    return colorize(text, fg=color, bg=on_color, attrs=attrs)
+
+
+def colorize(text, fg=None, bg=None, attrs=None):
     if os.getenv("ANSI_COLORS_DISABLED") is None:
         style = "NORMAL"
-        if "bold" in attrs:
+        if attrs is not None and not isinstance(attrs, list):
+            _attrs = []
+            if isinstance(attrs, six.string_types):
+                _attrs.append(attrs)
+            else:
+                _attrs = list(attrs)
+            attrs = _attrs
+        if attrs and "bold" in attrs:
             style = "BRIGHT"
             attrs.remove("bold")
-        if color is not None:
-            color = color.upper()
+        if fg is not None:
+            fg = fg.upper()
             text = to_native_string("%s%s%s%s%s") % (
-                to_native_string(getattr(colorama.Fore, color)),
+                to_native_string(getattr(colorama.Fore, fg)),
                 to_native_string(getattr(colorama.Style, style)),
                 to_native_string(text),
                 to_native_string(colorama.Fore.RESET),
                 to_native_string(colorama.Style.NORMAL),
             )
-        if on_color is not None:
-            on_color = on_color.upper()
+        if bg is not None:
+            bg = bg.upper()
             text = to_native_string("%s%s%s%s") % (
-                to_native_string(getattr(colorama.Back, on_color)),
+                to_native_string(getattr(colorama.Back, bg)),
                 to_native_string(text),
                 to_native_string(colorama.Back.RESET),
                 to_native_string(colorama.Style.NORMAL),
             )
@@ -129,6 +112,8 @@ def colored(text, color=None, on_color=None, attrs=None):
             text = fmt_str % (ATTRIBUTES[attr], text)
         text += RESET
+    else:
+        text = ANSI_REMOVAL_RE.sub("", text)
     return text
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 766291e835..f204cca309 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -155,10 +155,12 @@ def isolate(create_tmpdir):
     home_dir = os.path.join(str(create_tmpdir()), "home")
     os.makedirs(home_dir)
     mkdir_p(os.path.join(home_dir, ".config", "git"))
-    with open(os.path.join(home_dir, ".config", "git", "config"), "wb") as fp:
+    git_config_file = os.path.join(home_dir, ".config", "git", "config")
+    with open(git_config_file, "wb") as fp:
         fp.write(
             b"[user]\n\tname = pipenv\n\temail = pipenv@pipenv.org\n"
         )
+    os.environ["GIT_CONFIG"] = fs_str(git_config_file)
    os.environ["GIT_CONFIG_NOSYSTEM"] = fs_str("1")
    os.environ["GIT_AUTHOR_NAME"] = fs_str("pipenv")
    os.environ["GIT_AUTHOR_EMAIL"] = fs_str("pipenv@pipenv.org")
From 1006b50bcd6e7dbb1978bfd0774ef670c16a1986 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Thu, 18 Apr 2019 17:42:48 -0400
Subject: [PATCH 17/81] Fix marker handling

Signed-off-by: Dan Ryan
---
 .../vendor/requirementslib/models/markers.py  | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py
index 8bf8656ed2..79193b74a6 100644
--- a/pipenv/vendor/requirementslib/models/markers.py
+++ b/pipenv/vendor/requirementslib/models/markers.py
@@ -19,12 +19,19 @@

 if MYPY_RUNNING:
-    from typing import Optional, List
+    from typing import Optional, List, Generic, Type


 MAX_VERSIONS = {2: 7, 3: 10}


+def is_instance(item, cls):
+    # type: (Generic, Type) -> bool
+    if isinstance(item, cls) or item.__class__.__name__ == cls.__name__:
+        return True
+    return False
+
+
 @attr.s
 class PipenvMarkers(object):
     """System-level requirements - see PEP508 for more detail"""
@@ -158,7 +165,7 @@ def _format_pyspec(specifier):
 def _get_specs(specset):
     if specset is None:
         return
-    if isinstance(specset, Specifier) or not _is_iterable(specset):
+    if is_instance(specset, Specifier):
         new_specset = SpecifierSet()
         specs = set()
         specs.add(specset)
@@ -278,7 +285,7 @@ def get_versions(specset, group_by_operator=True):


 def _ensure_marker(marker):
-    if not isinstance(marker, Marker):
+    if not is_instance(marker, Marker):
         return Marker(str(marker))
     return marker

@@ -498,7 +505,9 @@ def get_specset(marker_list):
         else:
             specset.add(Specifier("{0}{1}".format(op.value, value.value)))
     elif isinstance(marker_parts, list):
-        specset.update(get_specset(marker_parts))
+        parts = get_specset(marker_parts)
+        if parts:
+            specset.update(parts)
     elif isinstance(marker_parts, str):
         _last_str = marker_parts
         specifiers = SpecifierSet()
@@ -583,7 +592,7 @@ def normalize_marker_str(marker):
     marker_str = ""
     if not marker:
         return None
-    if not isinstance(marker, Marker):
+    if not is_instance(marker, Marker):
         marker = _ensure_marker(marker)
     pyversion = get_contained_pyversions(marker)
     marker = get_without_pyversion(marker)
From 55f49d55f893f70e31bc4101bb1ec090515e2e89 Mon Sep 17 00:00:00 2001
From: Kenneth Reitz
Date: Sun, 21 Apr 2019 09:05:21 -0400
Subject: [PATCH 18/81] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index c6b6fb7db3..5b27a49c1b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,7 +23,7 @@ ONBUILD COPY Pipfile Pipfile
 ONBUILD COPY Pipfile.lock Pipfile.lock

 # -- Install dependencies:
-ONBUILD RUN set -ex && pipenv install --deploy --system
+ONBUILD RUN set -ex && pipenv install --deploy --system --sequential

 # --------------------
 # - Using This File: -
From ab12b4b83e3052250565936e39d9b41929b6c768 Mon Sep 17 00:00:00 2001
From: Kenneth Reitz
Date: Sun, 21 Apr 2019 14:16:43 -0400
Subject: [PATCH 19/81] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 5b27a49c1b..c6b6fb7db3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,7 +23,7 @@ ONBUILD COPY Pipfile Pipfile
 ONBUILD COPY Pipfile.lock Pipfile.lock

 # -- Install dependencies:
-ONBUILD RUN set -ex && pipenv install --deploy --system --sequential
+ONBUILD RUN set -ex && pipenv install --deploy --system

 # --------------------
 # - Using This File: -
From 1ef7b1373d1f669966f7133474431c142dd2cd6c Mon Sep 17 00:00:00 2001
From: Kenneth Reitz
Date: Sun, 21 Apr 2019 14:22:44 -0400
Subject: [PATCH 20/81] Update Dockerfile

---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index c6b6fb7db3..7f84dc1254 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -5,7 +5,7 @@ ENV LC_ALL C.UTF-8
 ENV LANG C.UTF-8

 # -- Install Pipenv:
-RUN apt update && apt upgrade -y && apt install python3.7-dev -y
+RUN apt update && apt upgrade -y && apt install python3.7-dev libffi-dev -y
 RUN curl --silent https://bootstrap.pypa.io/get-pip.py | python3.7

 # Backwards compatibility.
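An aside on PATCH 17/81 above: the new `is_instance` helper accepts a class-name match as well as a real `isinstance` match because pipenv can end up with two copies of the same library loaded, one vendored and one not, and a `Marker` or `Specifier` built by one copy fails an `isinstance` check against the other copy's class. A minimal sketch of the failure mode and the workaround (the module names below are illustrative stand-ins, not pipenv's real import paths):

    import types

    def is_instance(item, cls):
        # Same shape as the PATCH 17 helper: accept a genuine isinstance
        # match, or fall back to comparing class names.
        return isinstance(item, cls) or item.__class__.__name__ == cls.__name__

    # Simulate the same class being defined twice, as happens when a
    # library is imported both from a vendored tree and from site-packages.
    vendored = types.ModuleType("vendored.specifiers")    # hypothetical path
    canonical = types.ModuleType("packaging.specifiers")  # hypothetical path
    vendored.Specifier = type("Specifier", (object,), {})
    canonical.Specifier = type("Specifier", (object,), {})

    spec = vendored.Specifier()
    print(isinstance(spec, canonical.Specifier))   # False: distinct class objects
    print(is_instance(spec, canonical.Specifier))  # True: class names match

The trade-off is that any class named `Specifier` or `Marker` now passes the check, which is acceptable here because the objects only come from the two known copies of packaging.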
From 0414287af361d8bb4a8e4a157e34a9ef571824d1 Mon Sep 17 00:00:00 2001 From: Kenneth Reitz Date: Mon, 22 Apr 2019 08:15:36 -0400 Subject: [PATCH 21/81] Update Dockerfile --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile b/Dockerfile index 7f84dc1254..0aae379c55 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,6 +3,8 @@ FROM heroku/heroku:18-build ENV DEBIAN_FRONTEND noninteractive ENV LC_ALL C.UTF-8 ENV LANG C.UTF-8 +# Python, don't write bytecode! +ENV PYTHONDONTWRITEBYTECODE 1 # -- Install Pipenv: RUN apt update && apt upgrade -y && apt install python3.7-dev libffi-dev -y From 459c678e61560daed09db32711817b1feebb528a Mon Sep 17 00:00:00 2001 From: Hong Xu Date: Mon, 22 Apr 2019 15:10:00 -0700 Subject: [PATCH 22/81] Add installation instructions for Debian Buster+ in README --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 9c6fd39ac9..2fe92ce931 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,10 @@ If you\'re on MacOS, you can install Pipenv easily with Homebrew: $ brew install pipenv +Or, if you\'re using Debian Buster+: + + $ sudo apt install pipenv + Or, if you\'re using Fedora 28: $ sudo dnf install pipenv From 2d23ce5fce4a0cde2bb6cda8eb32a56d7b62ea64 Mon Sep 17 00:00:00 2001 From: Hong Xu Date: Mon, 22 Apr 2019 15:14:38 -0700 Subject: [PATCH 23/81] Add changelog for 3711. --- news/3711.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/3711.trivial.rst diff --git a/news/3711.trivial.rst b/news/3711.trivial.rst new file mode 100644 index 0000000000..48c531b210 --- /dev/null +++ b/news/3711.trivial.rst @@ -0,0 +1 @@ +Add installation instructions for Debian Buster+ in README From bddcde3bfbba04612f2a1b4244c6f0ce557826f7 Mon Sep 17 00:00:00 2001 From: Alexandre Morignot Date: Fri, 26 Apr 2019 11:28:32 +0200 Subject: [PATCH 24/81] Fix documentation install page link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2fe92ce931..74067d58e4 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ Or, if you\'re using FreeBSD: # pkg install py36-pipenv -Otherwise, refer to the [documentation](https://docs.pipenv.org/install/) for instructions. +Otherwise, refer to the [documentation](https://docs.pipenv.org/en/latest/install/#installing-pipenv) for instructions. ✨🍰✨ From 6cf692d28486ef167436b62f0fce215a2dcaf39a Mon Sep 17 00:00:00 2001 From: Greg Kaleka Date: Fri, 26 Apr 2019 09:13:41 -0700 Subject: [PATCH 25/81] Fix broken documentation links in CONTRIBUTING.md --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6e3ef65c53..03c1825e1d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,8 +3,8 @@ Before opening any issues or proposing any pull requests, please do the following: -1. Read our [Contributor's Guide](https://docs.pipenv.org/dev/contributing/). -2. Understand our [development philosophy](https://docs.pipenv.org/dev/philosophy/). +1. Read our [Contributor's Guide](https://docs.pipenv.org/en/latest/dev/contributing/). +2. Understand our [development philosophy](https://docs.pipenv.org/en/latest/dev/philosophy/). To get the greatest chance of helpful responses, please also observe the following additional notes. 
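A note on PATCH 21/81 above: `PYTHONDONTWRITEBYTECODE 1` keeps Python from writing `.pyc` caches into the image's layers. Python mirrors the variable in `sys.dont_write_bytecode`, so a quick sanity check inside a container built from this image could look like the following sketch (it assumes nothing beyond a working interpreter in the image):

    import sys

    # True when PYTHONDONTWRITEBYTECODE is set to a non-empty value, or when
    # the interpreter was started with -B; bytecode caches are then skipped.
    print(sys.dont_write_bytecode)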
From 615219975288c1c6dace4da2b1c6b85e429b4fc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= Date: Fri, 3 May 2019 12:18:03 +0200 Subject: [PATCH 26/81] Pytest 4: Update to get_closest_marker See https://docs.pytest.org/en/latest/mark.html#update-marker-code --- news/3724.trivial.rst | 1 + tests/integration/conftest.py | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 news/3724.trivial.rst diff --git a/news/3724.trivial.rst b/news/3724.trivial.rst new file mode 100644 index 0000000000..63a550133c --- /dev/null +++ b/news/3724.trivial.rst @@ -0,0 +1 @@ +Update pytest configuration to support pytest 4. diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 766291e835..a687abb7c5 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -88,21 +88,21 @@ def check_for_mercurial(): def pytest_runtest_setup(item): - if item.get_marker('needs_internet') is not None and not WE_HAVE_INTERNET: + if item.get_closest_marker('needs_internet') is not None and not WE_HAVE_INTERNET: pytest.skip('requires internet') - if item.get_marker('needs_github_ssh') is not None and not WE_HAVE_GITHUB_SSH_KEYS: + if item.get_closest_marker('needs_github_ssh') is not None and not WE_HAVE_GITHUB_SSH_KEYS: pytest.skip('requires github ssh') - if item.get_marker('needs_hg') is not None and not WE_HAVE_HG: + if item.get_closest_marker('needs_hg') is not None and not WE_HAVE_HG: pytest.skip('requires mercurial') - if item.get_marker('skip_py27_win') is not None and ( + if item.get_closest_marker('skip_py27_win') is not None and ( sys.version_info[:2] <= (2, 7) and os.name == "nt" ): pytest.skip('must use python > 2.7 on windows') - if item.get_marker('py3_only') is not None and ( + if item.get_closest_marker('py3_only') is not None and ( sys.version_info < (3, 0) ): pytest.skip('test only runs on python 3') - if item.get_marker('lte_py36') is not None and ( + if item.get_closest_marker('lte_py36') is not None and ( sys.version_info >= (3, 7) ): pytest.skip('test only runs on python < 3.7') From d79ae0c270a16705f2d45c07068c81f0cf762ba2 Mon Sep 17 00:00:00 2001 From: John Vandenberg Date: Thu, 4 Apr 2019 09:19:27 +0700 Subject: [PATCH 27/81] Remove unused vendored shutilwhich This package is no longer used. Closes https://github.com/pypa/pipenv/issues/3621 --- news/3621.trivial.rst | 1 + pipenv/vendor/shutilwhich/LICENSE | 292 -------------------------- pipenv/vendor/shutilwhich/__init__.py | 12 -- pipenv/vendor/shutilwhich/lib.py | 58 ----- pipenv/vendor/vendor.txt | 1 - tasks/vendoring/__init__.py | 1 - 6 files changed, 1 insertion(+), 364 deletions(-) create mode 100644 news/3621.trivial.rst delete mode 100644 pipenv/vendor/shutilwhich/LICENSE delete mode 100644 pipenv/vendor/shutilwhich/__init__.py delete mode 100644 pipenv/vendor/shutilwhich/lib.py diff --git a/news/3621.trivial.rst b/news/3621.trivial.rst new file mode 100644 index 0000000000..4d38a31ef9 --- /dev/null +++ b/news/3621.trivial.rst @@ -0,0 +1 @@ +Removed unused vendored package shutilwhich diff --git a/pipenv/vendor/shutilwhich/LICENSE b/pipenv/vendor/shutilwhich/LICENSE deleted file mode 100644 index 41ec01dbf4..0000000000 --- a/pipenv/vendor/shutilwhich/LICENSE +++ /dev/null @@ -1,292 +0,0 @@ -shutilwhich contains code from Python 3.3 and uses the same license: - -A. 
HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.2 2.1.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2.1 2.2 2002 PSF yes - 2.2.2 2.2.1 2002 PSF yes - 2.2.3 2.2.2 2003 PSF yes - 2.3 2.2.2 2002-2003 PSF yes - 2.3.1 2.3 2002-2003 PSF yes - 2.3.2 2.3.1 2002-2003 PSF yes - 2.3.3 2.3.2 2002-2003 PSF yes - 2.3.4 2.3.3 2004 PSF yes - 2.3.5 2.3.4 2005 PSF yes - 2.4 2.3 2004 PSF yes - 2.4.1 2.4 2005 PSF yes - 2.4.2 2.4.1 2005 PSF yes - 2.4.3 2.4.2 2006 PSF yes - 2.4.4 2.4.3 2006 PSF yes - 2.5 2.4 2006 PSF yes - 2.5.1 2.5 2007 PSF yes - 2.5.2 2.5.1 2008 PSF yes - 2.5.3 2.5.2 2008 PSF yes - 2.6 2.5 2008 PSF yes - 2.6.1 2.6 2008 PSF yes - 2.6.2 2.6.1 2009 PSF yes - 2.6.3 2.6.2 2009 PSF yes - 2.6.4 2.6.3 2009 PSF yes - 2.6.5 2.6.4 2010 PSF yes - 3.0 2.6 2008 PSF yes - 3.0.1 3.0 2009 PSF yes - 3.1 3.0.1 2009 PSF yes - 3.1.1 3.1 2009 PSF yes - 3.1.2 3.1.1 2010 PSF yes - 3.1.3 3.1.2 2010 PSF yes - 3.1.4 3.1.3 2011 PSF yes - 3.2 3.1 2011 PSF yes - 3.2.1 3.2 2011 PSF yes - 3.2.2 3.2.1 2011 PSF yes - 3.2.3 3.2.2 2012 PSF yes - 3.3.0 3.2 2012 PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. 
This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python -alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/pipenv/vendor/shutilwhich/__init__.py b/pipenv/vendor/shutilwhich/__init__.py deleted file mode 100644 index 7152881285..0000000000 --- a/pipenv/vendor/shutilwhich/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -__version__ = '1.1.0' - -import shutil - -if not hasattr(shutil, 'which'): - from .lib import which - shutil.which = which -else: - from shutil import which diff --git a/pipenv/vendor/shutilwhich/lib.py b/pipenv/vendor/shutilwhich/lib.py deleted file mode 100644 index 6451083ae1..0000000000 --- a/pipenv/vendor/shutilwhich/lib.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import sys - - -# Everything below this point has been copied verbatim from the Python-3.3 -# sources. -def which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. - - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - """ - # Check that a given file can be accessed with the correct mode. - # Additionally check that `file` is not a directory, as on Windows - # directories pass the os.access check. - def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) - - # Short circuit. If we're given a full path which matches the mode - # and it exists, we're done here. - if _access_check(cmd, mode): - return cmd - - path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep) - - if sys.platform == "win32": - # The current directory takes precedence on Windows. - if not os.curdir in path: - path.insert(0, os.curdir) - - # PATHEXT is necessary to check on Windows. - pathext = os.environ.get("PATHEXT", "").split(os.pathsep) - # See if the given file matches any of the expected path extensions. - # This will allow us to short circuit when given "python.exe". - matches = [cmd for ext in pathext if cmd.lower().endswith(ext.lower())] - # If it does match, only test that one, otherwise we have to try - # others. - files = [cmd] if matches else [cmd + ext.lower() for ext in pathext] - else: - # On other platforms you don't have things like PATHEXT to tell you - # what file suffixes are executable, so just pass on cmd as-is. 
- files = [cmd] - - seen = set() - for dir in path: - dir = os.path.normcase(dir) - if not dir in seen: - seen.add(dir) - for thefile in files: - name = os.path.join(dir, thefile) - if _access_check(name, mode): - return name - return None diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index f1fe99c061..8e04471682 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -37,7 +37,6 @@ requirementslib==1.4.2 shellingham==1.2.8 six==1.12.0 semver==2.8.1 -shutilwhich==1.1.0 toml==0.10.0 cached-property==1.5.1 vistir==0.3.1 diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index dce9e5a75b..f4494e5360 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -51,7 +51,6 @@ 'click-didyoumean': 'https://raw.githubusercontent.com/click-contrib/click-didyoumean/master/LICENSE', 'click-completion': 'https://raw.githubusercontent.com/click-contrib/click-completion/master/LICENSE', 'blindspin': 'https://raw.githubusercontent.com/kennethreitz/delegator.py/master/LICENSE', - 'shutilwhich': 'https://raw.githubusercontent.com/mbr/shutilwhich/master/LICENSE', 'parse': 'https://raw.githubusercontent.com/techalchemy/parse/master/LICENSE', 'semver': 'https://raw.githubusercontent.com/k-bx/python-semver/master/LICENSE.txt', 'crayons': 'https://raw.githubusercontent.com/kennethreitz/crayons/master/LICENSE', From 2481471b7dc15d8633d9b8437f4d53c6ef010167 Mon Sep 17 00:00:00 2001 From: John Vandenberg Date: Thu, 4 Apr 2019 09:19:27 +0700 Subject: [PATCH 28/81] Remove unused vendored blindspin This package is no longer used. Closes https://github.com/pypa/pipenv/issues/3640 --- news/3640.trivial.rst | 1 + pipenv/vendor/blindspin/LICENSE | 21 --------- pipenv/vendor/blindspin/__init__.py | 73 ----------------------------- pipenv/vendor/vendor.txt | 1 - tasks/vendoring/__init__.py | 1 - 5 files changed, 1 insertion(+), 96 deletions(-) create mode 100644 news/3640.trivial.rst delete mode 100644 pipenv/vendor/blindspin/LICENSE delete mode 100644 pipenv/vendor/blindspin/__init__.py diff --git a/news/3640.trivial.rst b/news/3640.trivial.rst new file mode 100644 index 0000000000..eb9b718d69 --- /dev/null +++ b/news/3640.trivial.rst @@ -0,0 +1 @@ +Removed unused vendored package blindspin diff --git a/pipenv/vendor/blindspin/LICENSE b/pipenv/vendor/blindspin/LICENSE deleted file mode 100644 index 00bf847d1b..0000000000 --- a/pipenv/vendor/blindspin/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright 2018 Kenneth Reitz - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/pipenv/vendor/blindspin/__init__.py b/pipenv/vendor/blindspin/__init__.py deleted file mode 100644 index a1230e831c..0000000000 --- a/pipenv/vendor/blindspin/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- - -import sys -import threading -import time -import itertools - - -class Spinner(object): - spinner_cycle = itertools.cycle(u'⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏') - - def __init__(self, beep=False, force=False): - self.beep = beep - self.force = force - self.stop_running = None - self.spin_thread = None - - def start(self): - if sys.stdout.isatty() or self.force: - self.stop_running = threading.Event() - self.spin_thread = threading.Thread(target=self.init_spin) - self.spin_thread.start() - - def stop(self): - if self.spin_thread: - self.stop_running.set() - self.spin_thread.join() - - def init_spin(self): - while not self.stop_running.is_set(): - next_val = next(self.spinner_cycle) - if sys.version_info[0] == 2: - next_val = next_val.encode('utf-8') - sys.stdout.write(next_val) - sys.stdout.flush() - time.sleep(0.07) - sys.stdout.write('\b') - - def __enter__(self): - self.start() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.stop() - if self.beep: - sys.stdout.write('\7') - sys.stdout.flush() - return False - - -def spinner(beep=False, force=False): - """This function creates a context manager that is used to display a - spinner on stdout as long as the context has not exited. - - The spinner is created only if stdout is not redirected, or if the spinner - is forced using the `force` parameter. - - Parameters - ---------- - beep : bool - Beep when spinner finishes. - force : bool - Force creation of spinner even when stdout is redirected. 
- - Example - ------- - - with spinner(): - do_something() - do_something_else() - - """ - return Spinner(beep, force) diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index f1fe99c061..9eb56415c2 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -1,7 +1,6 @@ appdirs==1.4.3 backports.shutil_get_terminal_size==1.0.0 backports.weakref==1.0.post1 -blindspin==2.0.1 click==7.0 click-completion==0.5.0 click-didyoumean==0.0.3 diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index dce9e5a75b..1d06f48917 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -50,7 +50,6 @@ 'delegator.py': 'https://raw.githubusercontent.com/kennethreitz/delegator.py/master/LICENSE', 'click-didyoumean': 'https://raw.githubusercontent.com/click-contrib/click-didyoumean/master/LICENSE', 'click-completion': 'https://raw.githubusercontent.com/click-contrib/click-completion/master/LICENSE', - 'blindspin': 'https://raw.githubusercontent.com/kennethreitz/delegator.py/master/LICENSE', 'shutilwhich': 'https://raw.githubusercontent.com/mbr/shutilwhich/master/LICENSE', 'parse': 'https://raw.githubusercontent.com/techalchemy/parse/master/LICENSE', 'semver': 'https://raw.githubusercontent.com/k-bx/python-semver/master/LICENSE.txt', From 90c2c66dc89ab72ff1b03b6f01fe80d1a30d571a Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sat, 11 May 2019 01:30:25 -0400 Subject: [PATCH 29/81] Update pythonfinder, requirementslib and vistir Signed-off-by: Dan Ryan --- pipenv/vendor/pythonfinder/__init__.py | 2 +- pipenv/vendor/pythonfinder/models/path.py | 71 ++++--- pipenv/vendor/pythonfinder/pythonfinder.py | 180 +++++++++++------ pipenv/vendor/pythonfinder/utils.py | 31 ++- pipenv/vendor/requirementslib/__init__.py | 2 +- .../vendor/requirementslib/models/markers.py | 19 +- .../requirementslib/models/requirements.py | 1 - .../requirementslib/models/setup_info.py | 184 ++++++++++-------- pipenv/vendor/requirementslib/models/utils.py | 6 +- pipenv/vendor/requirementslib/utils.py | 41 ++-- pipenv/vendor/vistir/__init__.py | 2 +- pipenv/vendor/vistir/_winconsole.py | 111 ++++++++++- pipenv/vendor/vistir/compat.py | 129 ++++++++++-- pipenv/vendor/vistir/misc.py | 73 ++++--- pipenv/vendor/vistir/path.py | 27 +++ tasks/release.py | 2 +- 16 files changed, 609 insertions(+), 272 deletions(-) diff --git a/pipenv/vendor/pythonfinder/__init__.py b/pipenv/vendor/pythonfinder/__init__.py index f3a981bd2d..d1f70c3b3c 100644 --- a/pipenv/vendor/pythonfinder/__init__.py +++ b/pipenv/vendor/pythonfinder/__init__.py @@ -10,7 +10,7 @@ from .models import SystemPath, WindowsFinder from .pythonfinder import Finder -__version__ = "1.2.0" +__version__ = "1.2.1" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/pythonfinder/models/path.py b/pipenv/vendor/pythonfinder/models/path.py index 55f7cb13eb..34559f7dc1 100644 --- a/pipenv/vendor/pythonfinder/models/path.py +++ b/pipenv/vendor/pythonfinder/models/path.py @@ -12,6 +12,7 @@ import six from cached_property import cached_property from vistir.compat import Path, fs_str +from vistir.misc import dedup from .mixins import BaseFinder, BasePath from .python import PythonVersion @@ -38,6 +39,7 @@ parse_asdf_version_order, parse_pyenv_version_order, path_is_known_executable, + split_version_and_name, unnest, ) @@ -209,6 +211,7 @@ def _run_setup(self): path_entries = self.paths.copy() if self.global_search and "PATH" in os.environ: path_order = path_order + os.environ["PATH"].split(os.pathsep) + path_order = 
list(dedup(path_order))
         path_instances = [
             ensure_path(p.strip('"'))
             for p in path_order
@@ -439,7 +442,7 @@ def get_path(self, path):
         return _path

     def _get_paths(self):
-        # type: () -> Generator[PathType, None, None]
+        # type: () -> Generator[Union[PathType, WindowsFinder], None, None]
         for path in self.path_order:
             try:
                 entry = self.get_path(path)
@@ -450,7 +453,7 @@ def _get_paths(self):

     @cached_property
     def path_entries(self):
-        # type: () -> List[Union[PathEntry, FinderType]]
+        # type: () -> List[Union[PathType, WindowsFinder]]
         paths = list(self._get_paths())
         return paths

@@ -558,6 +561,7 @@ def find_python_version(
         dev=None,  # type: Optional[bool]
         arch=None,  # type: Optional[str]
         name=None,  # type: Optional[str]
+        sort_by_path=False,  # type: bool
     ):
         # type: (...) -> PathEntry
         """Search for a specific python version on the path.
@@ -570,29 +574,12 @@ def find_python_version(
         :param bool dev: Search for devreleases (default None) - prioritize releases if None
         :param str arch: Architecture to include, e.g. '64bit', defaults to None
         :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
+        :param bool sort_by_path: Whether to sort by path -- default sort is by version (default: False)
         :return: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested.
         :rtype: :class:`~pythonfinder.models.PathEntry`
         """
-        if isinstance(major, six.string_types) and not minor and not patch:
-            # Only proceed if this is in the format "x.y.z" or similar
-            if major.isdigit() or (major.count(".") > 0 and major[0].isdigit()):
-                version = major.split(".", 2)
-                if isinstance(version, (tuple, list)):
-                    if len(version) > 3:
-                        major, minor, patch, rest = version
-                    elif len(version) == 3:
-                        major, minor, patch = version
-                    elif len(version) == 2:
-                        major, minor = version
-                else:
-                    major = major[0]
-            else:
-                major = major
-                name = None
-        else:
-            name = "{0!s}".format(major)
-            major = None
+        major, minor, patch, name = split_version_and_name(major, minor, patch, name)
         sub_finder = operator.methodcaller(
             "find_python_version", major, minor, patch, pre, dev, arch, name
         )
@@ -610,6 +597,18 @@ def find_python_version(
             windows_finder_version = sub_finder(self.windows_finder)
             if windows_finder_version:
                 return windows_finder_version
+        if sort_by_path:
+            paths = [self.get_path(k) for k in self.path_order]
+            for path in paths:
+                found_version = sub_finder(path)
+                if found_version:
+                    return found_version
+            if alternate_sub_finder:
+                for path in paths:
+                    found_version = alternate_sub_finder(path)
+                    if found_version:
+                        return found_version
+
         ver = next(iter(self.get_pythons(sub_finder)), None)
         if not ver and alternate_sub_finder is not None:
             ver = next(iter(self.get_pythons(alternate_sub_finder)), None)
@@ -647,9 +646,9 @@ def create(
         paths = []  # type: List[str]
         if ignore_unsupported:
             os.environ["PYTHONFINDER_IGNORE_UNSUPPORTED"] = fs_str("1")
-        # if global_search:
-        #     if "PATH" in os.environ:
-        #         paths = os.environ["PATH"].split(os.pathsep)
+        if global_search:
+            if "PATH" in os.environ:
+                paths = os.environ["PATH"].split(os.pathsep)
         path_order = []
         if path:
             path_order = [path]
@@ -663,18 +662,18 @@ def create(
                 )
             }
         )
-        # paths = [path] + paths
-        # paths = [p for p in paths if not any(is_in_path(p, shim) for shim in SHIM_PATHS)]
-        # _path_objects = [ensure_path(p.strip('"')) for p in paths]
-        # paths = [p.as_posix() for p in _path_objects]
-        # path_entries.update(
-        #     {
-        #         p.as_posix(): PathEntry.create(
-        #             path=p.absolute(), is_root=True, only_python=only_python
-        #         )
-        #         for p in _path_objects
- # } - # ) + paths = [path] + paths + paths = [p for p in paths if not any(is_in_path(p, shim) for shim in SHIM_PATHS)] + _path_objects = [ensure_path(p.strip('"')) for p in paths] + paths = [p.as_posix() for p in _path_objects] + path_entries.update( + { + p.as_posix(): PathEntry.create( + path=p.absolute(), is_root=True, only_python=only_python + ) + for p in _path_objects + } + ) instance = cls( paths=path_entries, path_order=path_order, diff --git a/pipenv/vendor/pythonfinder/pythonfinder.py b/pipenv/vendor/pythonfinder/pythonfinder.py index a68eab1e93..b0097c2236 100644 --- a/pipenv/vendor/pythonfinder/pythonfinder.py +++ b/pipenv/vendor/pythonfinder/pythonfinder.py @@ -14,11 +14,13 @@ from .utils import Iterable, filter_pythons, version_re if environment.MYPY_RUNNING: - from typing import Optional, Dict, Any, Union, List, Iterator + from typing import Optional, Dict, Any, Union, List, Iterator, Text from .models.path import Path, PathEntry from .models.windows import WindowsFinder from .models.path import SystemPath + STRING_TYPE = Union[str, Text, bytes] + class Finder(object): @@ -33,9 +35,14 @@ class Finder(object): """ def __init__( - self, path=None, system=False, global_search=True, ignore_unsupported=True + self, + path=None, + system=False, + global_search=True, + ignore_unsupported=True, + sort_by_path=False, ): - # type: (Optional[str], bool, bool, bool) -> None + # type: (Optional[str], bool, bool, bool, bool) -> None """Create a new :class:`~pythonfinder.pythonfinder.Finder` instance. :param path: A bin-directory search location, defaults to None @@ -46,12 +53,14 @@ def __init__( :param global_search: bool, optional :param ignore_unsupported: Whether to ignore unsupported python versions, if False, an error is raised, defaults to True :param ignore_unsupported: bool, optional + :param bool sort_by_path: Whether to always sort by path :returns: a :class:`~pythonfinder.pythonfinder.Finder` object. 
""" self.path_prepend = path # type: Optional[str] self.global_search = global_search # type: bool self.system = system # type: bool + self.sort_by_path = sort_by_path # type: bool self.ignore_unsupported = ignore_unsupported # type: bool self._system_path = None # type: Optional[SystemPath] self._windows_finder = None # type: Optional[WindowsFinder] @@ -92,7 +101,7 @@ def reload_system_path(self): self._system_path = self.create_system_path() def rehash(self): - # type: () -> None + # type: () -> "Finder" if not self._system_path: self._system_path = self.create_system_path() self.find_all_python_versions.cache_clear() @@ -123,11 +132,92 @@ def which(self, exe): # type: (str) -> Optional[PathEntry] return self.system_path.which(exe) + @classmethod + def parse_major(cls, major, minor=None, patch=None, pre=None, dev=None, arch=None): + # type: (Optional[str], Optional[int], Optional[int], Optional[bool], Optional[bool], Optional[str]) -> Dict[str, Union[int, str, bool, None]] + from .models import PythonVersion + + major_is_str = major and isinstance(major, six.string_types) + is_num = ( + major + and major_is_str + and all(part.isdigit() for part in major.split(".")[:2]) + ) + major_has_arch = ( + arch is None + and major + and major_is_str + and "-" in major + and major[0].isdigit() + ) + name = None + if major and major_has_arch: + orig_string = "{0!s}".format(major) + major, _, arch = major.rpartition("-") + if arch: + arch = arch.lower().lstrip("x").replace("bit", "") + if not (arch.isdigit() and (int(arch) & int(arch) - 1) == 0): + major = orig_string + arch = None + else: + arch = "{0}bit".format(arch) + try: + version_dict = PythonVersion.parse(major) + except (ValueError, InvalidPythonVersion): + if name is None: + name = "{0!s}".format(major) + major = None + version_dict = {} + elif major and major[0].isalpha(): + return {"major": None, "name": major, "arch": arch} + elif major and is_num: + match = version_re.match(major) + version_dict = match.groupdict() if match else {} # type: ignore + version_dict.update( + { + "is_prerelease": bool(version_dict.get("prerel", False)), + "is_devrelease": bool(version_dict.get("dev", False)), + } + ) + else: + version_dict = { + "major": major, + "minor": minor, + "patch": patch, + "pre": pre, + "dev": dev, + "arch": arch, + } + if not version_dict.get("arch") and arch: + version_dict["arch"] = arch + version_dict["minor"] = ( + int(version_dict["minor"]) if version_dict.get("minor") is not None else minor + ) + version_dict["patch"] = ( + int(version_dict["patch"]) if version_dict.get("patch") is not None else patch + ) + version_dict["major"] = ( + int(version_dict["major"]) if version_dict.get("major") is not None else major + ) + if not (version_dict["major"] or version_dict.get("name")): + version_dict["major"] = major + if name: + version_dict["name"] = name + return version_dict + @lru_cache(maxsize=1024) def find_python_version( - self, major=None, minor=None, patch=None, pre=None, dev=None, arch=None, name=None + self, + major=None, # type: Optional[Union[str, int]] + minor=None, # type: Optional[int] + patch=None, # type: Optional[int] + pre=None, # type: Optional[bool] + dev=None, # type: Optional[bool] + arch=None, # type: Optional[str] + name=None, # type: Optional[str] + sort_by_path=False, # type: bool ): - # type: (Optional[Union[str, int]], Optional[int], Optional[int], Optional[bool], Optional[bool], Optional[str], Optional[str]) -> PathEntry + # type: (...) 
-> Optional[PathEntry] """ Find the python version which corresponds most closely to the version requested. @@ -138,18 +228,19 @@ def find_python_version( :param Optional[bool] dev: If provided, whether to search dev-releases. :param Optional[str] arch: If provided, which architecture to search. :param Optional[str] name: *Name* of the target python, e.g. ``anaconda3-5.3.0`` + :param bool sort_by_path: Whether to sort by path -- default sort is by version(default: False) :return: A new *PathEntry* pointer at a matching python version, if one can be located. :rtype: :class:`pythonfinder.models.path.PathEntry` """ - from .models import PythonVersion - minor = int(minor) if minor is not None else minor patch = int(patch) if patch is not None else patch version_dict = { "minor": minor, "patch": patch, + "name": name, + "arch": arch, } # type: Dict[str, Union[str, int, Any]] if ( @@ -159,60 +250,22 @@ def find_python_version( and dev is None and patch is None ): - if arch is None and "-" in major and major[0].isdigit(): - orig_string = "{0!s}".format(major) - major, _, arch = major.rpartition("-") - if arch.startswith("x"): - arch = arch.lstrip("x") - if arch.lower().endswith("bit"): - arch = arch.lower().replace("bit", "") - if not (arch.isdigit() and (int(arch) & int(arch) - 1) == 0): - major = orig_string - arch = None - else: - arch = "{0}bit".format(arch) - try: - version_dict = PythonVersion.parse(major) - except (ValueError, InvalidPythonVersion): - if name is None: - name = "{0!s}".format(major) - major = None - version_dict = {} - elif major[0].isalpha(): - name = "%s" % major - major = None - else: - if "." in major and all(part.isdigit() for part in major.split(".")[:2]): - match = version_re.match(major) - version_dict = match.groupdict() - version_dict["is_prerelease"] = bool( - version_dict.get("prerel", False) - ) - version_dict["is_devrelease"] = bool(version_dict.get("dev", False)) - else: - version_dict = { - "major": major, - "minor": minor, - "patch": patch, - "pre": pre, - "dev": dev, - "arch": arch, - } - if version_dict.get("minor") is not None: - minor = int(version_dict["minor"]) - if version_dict.get("patch") is not None: - patch = int(version_dict["patch"]) - if version_dict.get("major") is not None: - major = int(version_dict["major"]) + version_dict = self.parse_major(major, minor=minor, patch=patch, arch=arch) + major = version_dict["major"] + minor = version_dict.get("minor", minor) # type: ignore + patch = version_dict.get("patch", patch) # type: ignore + arch = version_dict.get("arch", arch) # type: ignore + name = version_dict.get("name", name) # type: ignore _pre = version_dict.get("is_prerelease", pre) pre = bool(_pre) if _pre is not None else pre _dev = version_dict.get("is_devrelease", dev) dev = bool(_dev) if _dev is not None else dev - arch = ( - version_dict.get("architecture", None) if arch is None else arch - ) # type: ignore + if "architecture" in version_dict and isinstance( + version_dict["architecture"], six.string_types + ): + arch = version_dict["architecture"] # type: ignore if os.name == "nt" and self.windows_finder is not None: - match = self.windows_finder.find_python_version( + found = self.windows_finder.find_python_version( major=major, minor=minor, patch=patch, @@ -221,10 +274,17 @@ def find_python_version( arch=arch, name=name, ) - if match: - return match + if found: + return found return self.system_path.find_python_version( - major=major, minor=minor, patch=patch, pre=pre, dev=dev, arch=arch, name=name + major=major, + minor=minor, + 
patch=patch, + pre=pre, + dev=dev, + arch=arch, + name=name, + sort_by_path=self.sort_by_path, ) @lru_cache(maxsize=1024) diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index bf8a2f4030..1defda5f27 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -104,7 +104,7 @@ def get_python_version(path): combine_stderr=False, write_to_stdout=False, ) - timer = Timer(5, c.kill) + timer = Timer(SUBPROCESS_TIMEOUT, c.kill) except OSError: raise InvalidPythonVersion("%s is not a valid python path" % path) if not c.out: @@ -334,6 +334,35 @@ def parse_asdf_version_order(filename=".tool-versions"): return [] +def split_version_and_name( + major=None, # type: Optional[Union[str, int]] + minor=None, # type: Optional[Union[str, int]] + patch=None, # type: Optional[Union[str, int]] + name=None, # type: Optional[str] +): + # type: (...) -> Tuple[Optional[Union[str, int]], Optional[Union[str, int]], Optional[Union[str, int]], Optional[str]] + if isinstance(major, six.string_types) and not minor and not patch: + # Only proceed if this is in the format "x.y.z" or similar + if major.isdigit() or (major.count(".") > 0 and major[0].isdigit()): + version = major.split(".", 2) + if isinstance(version, (tuple, list)): + if len(version) > 3: + major, minor, patch, _ = version + elif len(version) == 3: + major, minor, patch = version + elif len(version) == 2: + major, minor = version + else: + major = major[0] + else: + major = major + name = None + else: + name = "{0!s}".format(major) + major = None + return (major, minor, patch, name) + + # TODO: Reimplement in vistir def is_in_path(path, parent): return normalize_path(str(path)).startswith(normalize_path(str(parent))) diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index c3f4b84d55..77ab414eac 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -10,7 +10,7 @@ from .models.pipfile import Pipfile from .models.requirements import Requirement -__version__ = "1.4.3.dev0" +__version__ = "1.4.3" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index 79193b74a6..e1014917df 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -9,7 +9,7 @@ from packaging.markers import InvalidMarker, Marker from packaging.specifiers import Specifier, SpecifierSet from vistir.compat import Mapping, Set, lru_cache -from vistir.misc import _is_iterable, dedup +from vistir.misc import dedup from .utils import filter_none, validate_markers from ..environment import MYPY_RUNNING @@ -19,14 +19,14 @@ if MYPY_RUNNING: - from typing import Optional, List, Generic, Type + from typing import Optional, List, Type, Any MAX_VERSIONS = {2: 7, 3: 10} def is_instance(item, cls): - # type: (Generic, Type) -> bool + # type: (Any, Type) -> bool if isinstance(item, cls) or item.__class__.__name__ == cls.__name__: return True return False @@ -139,8 +139,12 @@ def _format_pyspec(specifier): if not any(op in specifier for op in Specifier._operators.keys()): specifier = "=={0}".format(specifier) specifier = Specifier(specifier) - version = specifier.version.replace(".*", "") - if ".*" in specifier.version: + version = getattr(specifier, "version", specifier).rstrip() + if version and version.endswith("*"): + if version.endswith(".*"): + version = version.rstrip(".*") + else: 
+ version = version.rstrip("*") specifier = Specifier("{0}{1}".format(specifier.operator, version)) try: op = REPLACE_RANGES[specifier.operator] @@ -198,7 +202,10 @@ def _group_by_op(specs): @lru_cache(maxsize=128) def cleanup_pyspecs(specs, joiner="or"): - specs = {_format_pyspec(spec) for spec in specs} + if isinstance(specs, six.string_types): + specs = set([_format_pyspec(specs)]) + else: + specs = {_format_pyspec(spec) for spec in specs} # for != operator we want to group by version # if all are consecutive, join as a list results = set() diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 30dbec4646..cb8710db71 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -4,7 +4,6 @@ import collections import copy -import hashlib import os import sys from contextlib import contextmanager diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 8fe6506880..872ae4caa4 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -23,15 +23,7 @@ from packaging.markers import Marker from six.moves import configparser from six.moves.urllib.parse import unquote, urlparse, urlunparse -from vistir.compat import ( - FileNotFoundError, - Iterable, - Mapping, - Path, - fs_decode, - fs_encode, - lru_cache, -) +from vistir.compat import FileNotFoundError, Iterable, Mapping, Path, lru_cache from vistir.contextmanagers import cd, temp_path from vistir.misc import run from vistir.path import create_tracked_tempdir, ensure_mkdir_p, mkdir_p, rmtree @@ -151,7 +143,7 @@ def __init__(self, source_dir, build_backend): def parse_special_directives(setup_entry, package_dir=None): - # type: (S, Optional[S]) -> S + # type: (S, Optional[STRING_TYPE]) -> S rv = setup_entry if not package_dir: package_dir = os.getcwd() @@ -209,71 +201,92 @@ def setuptools_parse_setup_cfg(path): return results -def parse_setup_cfg(setup_cfg_path): - # type: (S) -> Dict[S, Union[S, None, Set[BaseRequirement], List[S], Tuple[S, Tuple[BaseRequirement]]]] - if os.path.exists(setup_cfg_path): - try: - return setuptools_parse_setup_cfg(setup_cfg_path) - except Exception: - pass - default_opts = { - "metadata": {"name": "", "version": ""}, - "options": { - "install_requires": "", - "python_requires": "", - "build_requires": "", - "setup_requires": "", - "extras": "", - "packages.find": {"where": "."}, - }, - } - parser = configparser.ConfigParser(default_opts) - parser.read(setup_cfg_path) - results = {} +def get_package_dir_from_setupcfg(parser, base_dir=None): + # type: (configparser.ConfigParser, STRING_TYPE) -> Text + if not base_dir: package_dir = os.getcwd() - if parser.has_option("options", "packages.find"): - pkg_dir = parser.get("options", "packages.find") - if isinstance(package_dir, Mapping): - package_dir = os.path.join(package_dir, pkg_dir.get("where")) - elif parser.has_option("options", "packages"): - pkg_dir = parser.get("options", "packages") - if "find:" in pkg_dir: - _, pkg_dir = pkg_dir.split("find:") - pkg_dir = pkg_dir.strip() - package_dir = os.path.join(package_dir, pkg_dir) - if parser.has_option("metadata", "name"): - results["name"] = parse_special_directives( - parser.get("metadata", "name"), package_dir - ) - if parser.has_option("metadata", "version"): - results["version"] = parse_special_directives( - parser.get("metadata", "version"), 
package_dir - ) - install_requires = set() # type: Set[BaseRequirement] - if parser.has_option("options", "install_requires"): - install_requires = make_base_requirements( - parser.get("options", "install_requires").split("\n") - ) - results["install_requires"] = install_requires - if parser.has_option("options", "python_requires"): - results["python_requires"] = parse_special_directives( - parser.get("options", "python_requires"), package_dir - ) - if parser.has_option("options", "build_requires"): - results["build_requires"] = parser.get("options", "build_requires") - extras = {} - if "options.extras_require" in parser.sections(): - extras_require_section = parser.options("options.extras_require") - for section in extras_require_section: - if section in ["options", "metadata"]: - continue - section_contents = parser.get("options.extras_require", section) - section_list = section_contents.split("\n") - section_extras = tuple(make_base_requirements(section_list)) - if section_extras: - extras[section] = section_extras - results["extras_require"] = extras - return results + else: + package_dir = base_dir + if parser.has_option("options", "packages.find"): + pkg_dir = parser.get("options", "packages.find") + if isinstance(package_dir, Mapping): + package_dir = os.path.join(package_dir, pkg_dir.get("where")) + elif parser.has_option("options", "packages"): + pkg_dir = parser.get("options", "packages") + if "find:" in pkg_dir: + _, pkg_dir = pkg_dir.split("find:") + pkg_dir = pkg_dir.strip() + package_dir = os.path.join(package_dir, pkg_dir) + return package_dir + + +def get_name_and_version_from_setupcfg(parser, package_dir): + # type: (configparser.ConfigParser, STRING_TYPE) -> Tuple[Optional[S], Optional[S]] + name, version = None, None + if parser.has_option("metadata", "name"): + name = parse_special_directives(parser.get("metadata", "name"), package_dir) + if parser.has_option("metadata", "version"): + version = parse_special_directives(parser.get("metadata", "version"), package_dir) + return name, version + + +def get_extras_from_setupcfg(parser): + # type: (configparser.ConfigParser) -> Dict[STRING_TYPE, Tuple[BaseRequirement, ...]] + extras = {} # type: Dict[STRING_TYPE, Tuple[BaseRequirement, ...]] + if "options.extras_require" not in parser.sections(): + return extras + extras_require_section = parser.options("options.extras_require") + for section in extras_require_section: + if section in ["options", "metadata"]: + continue + section_contents = parser.get("options.extras_require", section) + section_list = section_contents.split("\n") + section_extras = tuple(make_base_requirements(section_list)) + if section_extras: + extras[section] = section_extras + return extras + + +def parse_setup_cfg(setup_cfg_path): + # type: (S) -> Dict[S, Union[S, None, Set[BaseRequirement], List[S], Dict[STRING_TYPE, Tuple[BaseRequirement]]]] + if not os.path.exists(setup_cfg_path): + raise FileNotFoundError(setup_cfg_path) + try: + return setuptools_parse_setup_cfg(setup_cfg_path) + except Exception: + pass + default_opts = { + "metadata": {"name": "", "version": ""}, + "options": { + "install_requires": "", + "python_requires": "", + "build_requires": "", + "setup_requires": "", + "extras": "", + "packages.find": {"where": "."}, + }, + } + parser = configparser.ConfigParser(default_opts) + parser.read(setup_cfg_path) + results = {} + package_dir = get_package_dir_from_setupcfg(parser, base_dir=os.getcwd()) + name, version = get_name_and_version_from_setupcfg(parser, package_dir) + results["name"] = 
name + results["version"] = version + install_requires = set() # type: Set[BaseRequirement] + if parser.has_option("options", "install_requires"): + install_requires = make_base_requirements( + parser.get("options", "install_requires").split("\n") + ) + results["install_requires"] = install_requires + if parser.has_option("options", "python_requires"): + results["python_requires"] = parse_special_directives( + parser.get("options", "python_requires"), package_dir + ) + if parser.has_option("options", "build_requires"): + results["build_requires"] = parser.get("options", "build_requires") + results["extras_require"] = get_extras_from_setupcfg(parser) + return results @contextlib.contextmanager @@ -526,8 +539,10 @@ def get_metadata_from_wheel(wheel_path): name = metadata.name version = metadata.version requires = [] - extras_keys = getattr(metadata, "extras", []) - extras = {k: [] for k in extras_keys} + extras_keys = getattr(metadata, "extras", []) # type: List[STRING_TYPE] + extras = { + k: [] for k in extras_keys + } # type: Dict[STRING_TYPE, List[RequirementType]] for req in getattr(metadata, "run_requires", []): parsed_req = init_requirement(req) parsed_marker = parsed_req.marker @@ -555,7 +570,7 @@ def get_metadata_from_dist(dist): dep_map = dist._build_dep_map() except Exception: dep_map = {} - deps = [] + deps = [] # type: List[PkgResourcesRequirement] extras = {} for k in dep_map.keys(): if k is None: @@ -573,12 +588,14 @@ def get_metadata_from_dist(dist): else: marker = "" extra = "{0}".format(k) - _deps = ["{0}{1}".format(str(req), marker) for req in _deps] - _deps = ensure_reqs(tuple(_deps)) + _deps = ensure_reqs( + tuple(["{0}{1}".format(str(req), marker) for req in _deps]) + ) if extra: extras[extra] = _deps else: deps.extend(_deps) + requires.extend(deps) return { "name": dist.project_name, "version": dist.version, @@ -634,6 +651,7 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no unparsed = unparse(item.value) elif isinstance(item, ast.Name): if not initial_mapping: + unparsed = item.id if analyzer and recurse: if item in analyzer.assignments: items = unparse(analyzer.assignments[item]) @@ -643,10 +661,6 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no if assignment is not None: items = unparse(analyzer.assignments[assignment]) unparsed = items.get(item.id, item.id) - else: - unparsed = item.id - else: - unparsed = item.id else: unparsed = item elif six.PY3 and isinstance(item, ast.NameConstant): @@ -915,7 +929,7 @@ def egg_base(self): base = Path(self.extra_kwargs["src_dir"]) egg_base = base.joinpath("reqlib-metadata") if not egg_base.exists(): - atexit.register(rmtree, fs_encode(egg_base.as_posix())) + atexit.register(rmtree, egg_base.as_posix()) egg_base.mkdir(parents=True, exist_ok=True) return egg_base.as_posix() @@ -1160,7 +1174,7 @@ def get_egg_metadata(self, metadata_dir=None, metadata_type=None): metadata = [ get_metadata(d, pkg_name=self.name, metadata_type=metadata_type) for d in metadata_dirs - if os.path.exists(fs_encode(d)) + if os.path.exists(d) ] metadata = next(iter(d for d in metadata if d), None) return metadata diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py index dd5afcbb9d..2f4c26c225 100644 --- a/pipenv/vendor/requirementslib/models/utils.py +++ b/pipenv/vendor/requirementslib/models/utils.py @@ -27,7 +27,7 @@ from vistir.path import is_valid_url from ..environment import MYPY_RUNNING -from ..utils import SCHEME_LIST, VCS_LIST, 
add_ssh_scheme_to_git_uri, is_star +from ..utils import SCHEME_LIST, VCS_LIST, is_star if MYPY_RUNNING: from typing import ( @@ -43,9 +43,9 @@ Text, AnyStr, Match, - Iterable, + Iterable, # noqa ) - from attr import _ValidatorType + from attr import _ValidatorType # noqa from packaging.requirements import Requirement as PackagingRequirement from pkg_resources import Requirement as PkgResourcesRequirement from pkg_resources.extern.packaging.markers import ( diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index 7650d764a4..503a13d071 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -1,51 +1,40 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, print_function -import contextlib import logging import os import sys import pip_shims.shims import six +import six.moves import tomlkit import vistir from six.moves.urllib.parse import urlparse, urlsplit, urlunparse from vistir.compat import Path -from vistir.path import create_tracked_tempdir, ensure_mkdir_p, is_valid_url +from vistir.path import ensure_mkdir_p, is_valid_url from .environment import MYPY_RUNNING # fmt: off -six.add_move( - six.MovedAttribute("Mapping", "collections", "collections.abc") -) # type: ignore # noqa # isort:skip -six.add_move( - six.MovedAttribute("Sequence", "collections", "collections.abc") -) # type: ignore # noqa # isort:skip -six.add_move( - six.MovedAttribute("Set", "collections", "collections.abc") -) # type: ignore # noqa # isort:skip -six.add_move( - six.MovedAttribute("ItemsView", "collections", "collections.abc") -) # type: ignore # noqa +six.add_move( # type: ignore + six.MovedAttribute("Mapping", "collections", "collections.abc") # type: ignore +) # noqa # isort:skip +six.add_move( # type: ignore + six.MovedAttribute("Sequence", "collections", "collections.abc") # type: ignore +) # noqa # isort:skip +six.add_move( # type: ignore + six.MovedAttribute("Set", "collections", "collections.abc") # type: ignore +) # noqa # isort:skip +six.add_move( # type: ignore + six.MovedAttribute("ItemsView", "collections", "collections.abc") # type: ignore +) # noqa from six.moves import ItemsView, Mapping, Sequence, Set # type: ignore # noqa # isort:skip # fmt: on if MYPY_RUNNING: - from typing import ( - Dict, - Any, - Optional, - Union, - Tuple, - List, - Iterable, - Generator, - Text, - TypeVar, - ) + from typing import Dict, Any, Optional, Union, Tuple, List, Iterable, Text, TypeVar STRING_TYPE = Union[bytes, str, Text] S = TypeVar("S", bytes, str, Text) diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index aa7831a538..6ad359047d 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -36,7 +36,7 @@ from .path import create_tracked_tempdir, create_tracked_tempfile, mkdir_p, rmtree from .spin import create_spinner -__version__ = "0.4.0" +__version__ = "0.4.1" __all__ = [ diff --git a/pipenv/vendor/vistir/_winconsole.py b/pipenv/vendor/vistir/_winconsole.py index 8f176ddf85..22eea2cd94 100644 --- a/pipenv/vendor/vistir/_winconsole.py +++ b/pipenv/vendor/vistir/_winconsole.py @@ -61,8 +61,9 @@ WINFUNCTYPE, ) from ctypes.wintypes import LPWSTR, LPCWSTR +from itertools import count from six import PY2, text_type -from .misc import StreamWrapper +from .misc import StreamWrapper, run try: from ctypes import pythonapi @@ -391,3 +392,111 @@ def show_cursor(): def get_stream_handle(stream): return STREAM_MAP.get(stream.fileno()) + + +def 
_walk_for_powershell(directory): + for path, dirs, files in os.walk(directory): + powershell = next( + iter(fn for fn in files if fn.lower() == "powershell.exe"), None + ) + if powershell is not None: + return os.path.join(directory, powershell) + for subdir in dirs: + powershell = _walk_for_powershell(os.path.join(directory, subdir)) + if powershell: + return powershell + return None + + +def _get_powershell_path(): + paths = [ + os.path.expandvars(r"%windir%\{0}\WindowsPowerShell").format(subdir) + for subdir in ("SysWOW64", "system32") + ] + powershell_path = next(iter(_walk_for_powershell(pth) for pth in paths), None) + if not powershell_path: + powershell_path, _ = run( + ["where", "powershell"], block=True, nospin=True, return_object=False + ) + if powershell_path: + return powershell_path.strip() + return None + + +def _get_sid_with_powershell(): + powershell_path = _get_powershell_path() + if not powershell_path: + return None + args = [ + powershell_path, + "-ExecutionPolicy", + "Bypass", + "-Command", + "Invoke-Expression '[System.Security.Principal.WindowsIdentity]::GetCurrent().user | Write-Host'", + ] + sid, _ = run(args, nospin=True) + return sid.strip() + + +def _get_sid_from_registry(): + try: + import winreg + except ImportError: + import _winreg as winreg + var_names = ("%USERPROFILE%", "%HOME%") + current_user_home = next(iter(os.path.expandvars(v) for v in var_names if v), None) + root, subkey = ( + winreg.HKEY_LOCAL_MACHINE, + r"Software\Microsoft\Windows NT\CurrentVersion\ProfileList", + ) + subkey_names = [] + value = None + matching_key = None + try: + with winreg.OpenKeyEx(root, subkey, 0, winreg.KEY_READ) as key: + for i in count(): + key_name = winreg.EnumKey(key, i) + subkey_names.append(key_name) + value = query_registry_value( + root, r"{0}\{1}".format(subkey, key_name), "ProfileImagePath" + ) + if value and value.lower() == current_user_home.lower(): + matching_key = key_name + break + except OSError: + pass + if matching_key is not None: + return matching_key + + +def get_value_from_tuple(value, value_type): + try: + import winreg + except ImportError: + import _winreg as winreg + if value_type in (winreg.REG_SZ, winreg.REG_EXPAND_SZ): + if "\0" in value: + return value[: value.index("\0")] + return value + return None + + +def query_registry_value(root, key_name, value): + try: + import winreg + except ImportError: + import _winreg as winreg + try: + with winreg.OpenKeyEx(root, key_name, 0, winreg.KEY_READ) as key: + return get_value_from_tuple(*winreg.QueryValueEx(key, value)) + except OSError: + return None + + +def get_current_user(): + fns = (_get_sid_from_registry, _get_sid_with_powershell) + for fn in fns: + result = fn() + if result: + return result + return None diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index 6c683747a4..417a785436 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -43,7 +43,7 @@ if sys.version_info >= (3, 5): from pathlib import Path else: - from pipenv.vendor.pathlib2 import Path + from pathlib2 import Path if six.PY3: # Only Python 3.4+ is supported @@ -53,12 +53,14 @@ from weakref import finalize else: # Only Python 2.7 is supported - from pipenv.vendor.backports.functools_lru_cache import lru_cache + from backports.functools_lru_cache import lru_cache from .backports.functools import partialmethod # type: ignore - from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size + from backports.shutil_get_terminal_size import get_terminal_size + 
from .backports.surrogateescape import register_surrogateescape + register_surrogateescape() NamedTemporaryFile = _NamedTemporaryFile - from pipenv.vendor.backports.weakref import finalize # type: ignore + from backports.weakref import finalize # type: ignore try: # Introduced Python 3.5 @@ -245,6 +247,72 @@ def _get_path(path): return +# copied from the os backport which in turn copied this from +# the pyutf8 package -- +# URL: https://github.com/etrepum/pyutf8/blob/master/pyutf8/ref.py +# +def _invalid_utf8_indexes(bytes): + skips = [] + i = 0 + len_bytes = len(bytes) + while i < len_bytes: + c1 = bytes[i] + if c1 < 0x80: + # U+0000 - U+007F - 7 bits + i += 1 + continue + try: + c2 = bytes[i + 1] + if (c1 & 0xE0 == 0xC0) and (c2 & 0xC0 == 0x80): + # U+0080 - U+07FF - 11 bits + c = ((c1 & 0x1F) << 6) | (c2 & 0x3F) + if c < 0x80: # pragma: no cover + # Overlong encoding + skips.extend([i, i + 1]) # pragma: no cover + i += 2 + continue + c3 = bytes[i + 2] + if (c1 & 0xF0 == 0xE0) and (c2 & 0xC0 == 0x80) and (c3 & 0xC0 == 0x80): + # U+0800 - U+FFFF - 16 bits + c = ((((c1 & 0x0F) << 6) | (c2 & 0x3F)) << 6) | (c3 & 0x3F) + if (c < 0x800) or (0xD800 <= c <= 0xDFFF): + # Overlong encoding or surrogate. + skips.extend([i, i + 1, i + 2]) + i += 3 + continue + c4 = bytes[i + 3] + if ( + (c1 & 0xF8 == 0xF0) + and (c2 & 0xC0 == 0x80) + and (c3 & 0xC0 == 0x80) + and (c4 & 0xC0 == 0x80) + ): + # U+10000 - U+10FFFF - 21 bits + c = ((((((c1 & 0x0F) << 6) | (c2 & 0x3F)) << 6) | (c3 & 0x3F)) << 6) | ( + c4 & 0x3F + ) + if (c < 0x10000) or (c > 0x10FFFF): # pragma: no cover + # Overlong encoding or invalid code point. + skips.extend([i, i + 1, i + 2, i + 3]) + i += 4 + continue + except IndexError: + pass + skips.append(i) + i += 1 + return skips + + +# XXX backport: Another helper to support the Python 2 UTF-8 decoding hack. 
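
The scanner above records the byte offsets of every malformed UTF-8
sequence; the `_chunks` helper that follows splits the byte string at those
offsets so each valid run decodes normally while each bad byte is escaped
individually. A minimal illustrative sketch of the combined effect,
runnable on Python 3 (the sample bytes are not from the patch):

    # Python 3's built-in "surrogateescape" handler yields the same result
    # that these two helpers reconstruct for Python 2.
    raw = b"caf\xc3\xa9\xff.txt"   # valid UTF-8 plus one stray 0xff byte
    # _invalid_utf8_indexes(raw) would report offset 5; splitting there
    # decodes the valid runs cleanly and maps the bad byte to U+DCFF.
    assert raw.decode("utf-8", "surrogateescape") == "caf\xe9\udcff.txt"
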
+def _chunks(b, indexes): + i = 0 + for j in indexes: + yield b[i:j] + yield b[j : j + 1] + i = j + 1 + yield b[i:] + + def fs_encode(path): """ Encode a filesystem path to the proper filesystem encoding @@ -257,7 +325,16 @@ def fs_encode(path): if path is None: raise TypeError("expected a valid path to encode") if isinstance(path, six.text_type): - path = path.encode(_fs_encoding, _fs_encode_errors) + if six.PY2: + return b"".join( + ( + _byte(ord(c) - 0xDC00) + if 0xDC00 <= ord(c) <= 0xDCFF + else c.encode(_fs_encoding, _fs_encode_errors) + ) + for c in path + ) + return path.encode(_fs_encoding, _fs_encode_errors) return path @@ -266,35 +343,49 @@ def fs_decode(path): Decode a filesystem path using the proper filesystem encoding :param path: The filesystem path to decode from bytes or string - :return: [description] - :rtype: [type] + :return: The filesystem path, decoded with the determined encoding + :rtype: Text """ path = _get_path(path) if path is None: raise TypeError("expected a valid path to decode") if isinstance(path, six.binary_type): - path = path.decode(_fs_encoding, _fs_decode_errors) + if six.PY2: + from array import array + + indexes = _invalid_utf8_indexes(array(str("B"), path)) + return "".join( + chunk.decode(_fs_encoding, _fs_decode_errors) + for chunk in _chunks(path, indexes) + ) + return path.decode(_fs_encoding, _fs_decode_errors) return path -if sys.version_info >= (3, 3) and os.name != "nt": - _fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() +if sys.version_info[0] < 3: + _fs_encode_errors = "surrogateescape" + _fs_decode_errors = "surrogateescape" + _fs_encoding = "utf-8" else: _fs_encoding = "utf-8" - -if six.PY3: - if os.name == "nt": + if sys.platform.startswith("win"): _fs_error_fn = None - alt_strategy = "surrogatepass" + if sys.version_info[:2] > (3, 4): + alt_strategy = "surrogatepass" + else: + alt_strategy = "surrogateescape" else: + if sys.version_info >= (3, 3): + _fs_encoding = sys.getfilesystemencoding() + if not _fs_encoding: + _fs_encoding = sys.getdefaultencoding() alt_strategy = "surrogateescape" _fs_error_fn = getattr(sys, "getfilesystemencodeerrors", None) - _fs_encode_errors = _fs_error_fn() if _fs_error_fn is not None else alt_strategy - _fs_decode_errors = _fs_error_fn() if _fs_error_fn is not None else alt_strategy -else: - _fs_encode_errors = "backslashreplace" - _fs_decode_errors = "replace" + _fs_encode_errors = _fs_error_fn() if _fs_error_fn else alt_strategy + _fs_decode_errors = _fs_error_fn() if _fs_error_fn else alt_strategy + +_byte = chr if sys.version_info < (3,) else lambda i: bytes([i]) def to_native_string(string): diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index 63f7dc5bfa..ae7268608d 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -222,13 +222,15 @@ def _create_subprocess( c = _spawn_subprocess( cmd, env=env, block=block, cwd=cwd, combine_stderr=combine_stderr ) - except Exception: + except Exception as exc: import traceback - formatted_tb = "".join(traceback.format_exception(*sys.exc_info())) - sys.stderr.write("Error while executing command %s:" % " ".join(cmd._parts)) - sys.stderr.write(formatted_tb) - raise + formatted_tb = "".join(traceback.format_exception(*sys.exc_info())) # pragma: no cover + sys.stderr.write( # pragma: no cover + "Error while executing command %s:" % to_native_string(" ".join(cmd._parts)) # pragma: no cover + ) # pragma: no cover + sys.stderr.write(formatted_tb) # pragma: no cover + raise exc # pragma: no cover if 
not block: c.stdin.close() spinner_orig_text = "" @@ -397,14 +399,14 @@ def partialclass(cls, *args, **kwargs): # Swiped from attrs.make_class try: type_.__module__ = sys._getframe(1).f_globals.get("__name__", "__main__") - except (AttributeError, ValueError): - pass + except (AttributeError, ValueError): # pragma: no cover + pass # pragma: no cover return type_ # Borrowed from django -- force bytes and decode -- see link for details: # https://github.com/django/django/blob/fc6b90b/django/utils/encoding.py#L112 -def to_bytes(string, encoding="utf-8", errors="ignore"): +def to_bytes(string, encoding="utf-8", errors=None): """Force a value to bytes. :param string: Some input that can be converted to a bytes. @@ -415,16 +417,20 @@ def to_bytes(string, encoding="utf-8", errors="ignore"): :rtype: bytes """ + unicode_name = get_canonical_encoding_name("utf-8") if not errors: - if encoding.lower() == "utf-8": - errors = "surrogateescape" if six.PY3 else "ignore" + if get_canonical_encoding_name(encoding) == unicode_name: + if six.PY3 and os.name == "nt": + errors = "surrogatepass" + else: + errors = "surrogateescape" if six.PY3 else "ignore" else: errors = "strict" if isinstance(string, bytes): - if encoding.lower() == "utf-8": + if get_canonical_encoding_name(encoding) == unicode_name: return string else: - return string.decode("utf-8").encode(encoding, errors) + return string.decode(unicode_name).encode(encoding, errors) elif isinstance(string, memoryview): return bytes(string) elif not isinstance(string, six.string_types): @@ -452,9 +458,13 @@ def to_text(string, encoding="utf-8", errors=None): :rtype: str """ + unicode_name = get_canonical_encoding_name("utf-8") if not errors: - if encoding.lower() == "utf-8": - errors = "surrogateescape" if six.PY3 else "ignore" + if get_canonical_encoding_name(encoding) == unicode_name: + if six.PY3 and os.name == "nt": + errors = "surrogatepass" + else: + errors = "surrogateescape" if six.PY3 else "ignore" else: errors = "strict" if issubclass(type(string), six.text_type): @@ -801,17 +811,16 @@ def _isatty(stream): if os.name == "nt" or sys.platform.startswith("win"): - def _wrap_for_color(stream, allow_color=True): - if colorama is not None: + if colorama is not None: + def _wrap_for_color(stream, color=None): try: cached = _color_stream_cache.get(stream) except KeyError: cached = None if cached is not None: return cached - if not _isatty(stream): - allow_color = False - _color_wrapper = colorama.AnsiToWin32(stream, strip=not allow_color) + strip = not _can_use_color(stream, color) + _color_wrapper = colorama.AnsiToWin32(stream, strip=strip) result = _color_wrapper.stream _write = result.write @@ -829,8 +838,6 @@ def _write_with_color(s): pass return result - return stream - def _cached_stream_lookup(stream_lookup_func, stream_resolution_func): stream_cache = WeakKeyDictionary() @@ -853,7 +860,7 @@ def lookup(): return lookup -def get_text_stream(stream="stdout", encoding=None, allow_color=True): +def get_text_stream(stream="stdout", encoding=None): """Retrieve a unicode stream wrapper around **sys.stdout** or **sys.stderr**. :param str stream: The name of the stream to wrap from the :mod:`sys` module. 
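
The rework above hinges the Windows color path on colorama: `_wrap_for_color`
is now only defined when colorama imports, and whether escapes are rendered
or stripped is decided once via `_can_use_color`. A minimal sketch of the
underlying mechanism, assuming only the colorama package (`wrap_stream` is a
hypothetical name, not vistir API):

    import sys
    import colorama

    def wrap_stream(stream=sys.stdout, use_color=True):
        # AnsiToWin32 translates ANSI escapes into Win32 console calls;
        # with strip=True it removes them instead (e.g. for non-tty output).
        return colorama.AnsiToWin32(stream, strip=not use_color).stream

    wrap_stream(use_color=True).write("\x1b[32mgreen\x1b[0m\n")
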
@@ -916,15 +923,19 @@ def replace_with_text_stream(stream_name): return None -def _can_use_color(stream=None, fg=None, bg=None, style=None): - if not any([fg, bg, style]): +def _can_use_color(stream=None, color=None): + from .termcolors import DISABLE_COLORS + + if DISABLE_COLORS: + return False + if not color: if not stream: stream = sys.stdin return _isatty(stream) - return any([fg, bg, style]) + return bool(color) -def echo(text, fg=None, bg=None, style=None, file=None, err=False): +def echo(text, fg=None, bg=None, style=None, file=None, err=False, color=None): """Write the given text to the provided stream or **sys.stdout** by default. Provides optional foreground and background colors from the ansi defaults: @@ -939,6 +950,7 @@ def echo(text, fg=None, bg=None, style=None, file=None, err=False): :param str bg: Foreground color to use (default: None) :param str style: Style to use (default: None) :param stream file: File to write to (default: None) + :param bool color: Whether to force color (i.e. ANSI codes are in the text) """ if file and not hasattr(file, "write"): @@ -963,12 +975,13 @@ def echo(text, fg=None, bg=None, style=None, file=None, err=False): buffer.flush() return if text and not is_bytes(text): - can_use_color = _can_use_color(file, fg=fg, bg=bg, style=style) - if os.name == "nt": + can_use_color = _can_use_color(file, color=color) + if any([fg, bg, style]): text = colorize(text, fg=fg, bg=bg, attrs=style) - file = _wrap_for_color(file, allow_color=can_use_color) - elif not can_use_color: + if not can_use_color or (os.name == "nt" and not _wrap_for_color): text = ANSI_REMOVAL_RE.sub("", text) + elif os.name == "nt" and _wrap_for_color: + file = _wrap_for_color(file, color=color) if text: file.write(text) file.flush() diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 71d36f1c6a..8ea408f98d 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -306,6 +306,22 @@ def create_tracked_tempfile(*args, **kwargs): return _NamedTemporaryFile(*args, **kwargs) +def _find_icacls_exe(): + if os.name == "nt": + paths = [ + os.path.expandvars(r"%windir%\{0}").format(subdir) + for subdir in ("system32", "SysWOW64") + ] + for path in paths: + icacls_path = next( + iter(fn for fn in os.listdir(path) if fn.lower() == "icacls.exe"), None + ) + if icacls_path is not None: + icacls_path = os.path.join(path, icacls_path) + return icacls_path + return None + + def set_write_bit(fn): # type: (str) -> None """ @@ -321,6 +337,17 @@ def set_write_bit(fn): return file_stat = os.stat(fn).st_mode os.chmod(fn, file_stat | stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) + if os.name == "nt": + from ._winconsole import get_current_user + + user_sid = get_current_user() + icacls_exe = _find_icacls_exe() or "icacls" + from .misc import run + if user_sid: + _, err = run([icacls_exe, "/grant", "{0}:WD".format(user_sid), "''{0}''".format(fn), "/T", "/C", "/Q"]) + if not err: + return + if not os.path.isdir(fn): for path in [fn, os.path.dirname(fn)]: try: diff --git a/tasks/release.py b/tasks/release.py index 375d73020c..dc76a5f175 100644 --- a/tasks/release.py +++ b/tasks/release.py @@ -129,7 +129,7 @@ def build_dists(ctx): log('Building sdist using %s ....' % executable) os.environ["PIPENV_PYTHON"] = py_version ctx.run('pipenv install --dev', env=env) - ctx.run('pipenv run pip install -e . --upgrade --upgrade-strategy=eager', env=env) + ctx.run('pipenv run pip install -e . 
--upgrade --upgrade-strategy=eager --no-use-pep517', env=env) log('Building wheel using python %s ....' % py_version) if py_version == '3.6': ctx.run('pipenv run python setup.py sdist bdist_wheel', env=env) From 1384a25a38d54ce3041fda297e295d5d07fd932f Mon Sep 17 00:00:00 2001 From: frostming Date: Tue, 14 May 2019 09:54:18 +0800 Subject: [PATCH 30/81] make check unused work --- pipenv/core.py | 6 +++--- tests/integration/test_cli.py | 9 +++++---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 76ce7e8b47..9e190343fb 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -242,7 +242,7 @@ def import_from_code(path="."): rs = [] try: - for r in pipreqs.get_all_imports(path): + for r in pipreqs.get_all_imports(path, encoding="utf-8"): if r not in BAD_PACKAGES: rs.append(r) pkg_names = pipreqs.get_pkg_names(rs) @@ -2534,8 +2534,8 @@ def do_check( if not args: args = [] if unused: - deps_required = [k for k in project.packages.keys()] - deps_needed = import_from_code(unused) + deps_required = [k.lower() for k in project.packages.keys()] + deps_needed = [k.lower() for k in import_from_code(unused)] for dep in deps_needed: try: deps_required.remove(dep) diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index a38883e0ed..ae1b13dde5 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -47,7 +47,7 @@ def test_pipenv_site_packages(PipenvInstance): c = p.pipenv('--python python --site-packages') assert c.return_code == 0 assert 'Making site-packages available' in c.err - + # no-global-site-packages.txt under stdlib dir should not exist. c = p.pipenv('run python -c "import sysconfig; print(sysconfig.get_path(\'stdlib\'))"') assert c.return_code == 0 @@ -215,20 +215,21 @@ def test_install_parse_error(PipenvInstance, pypi): @pytest.mark.code @pytest.mark.check @pytest.mark.unused -@pytest.mark.skip(reason="non-deterministic") def test_check_unused(PipenvInstance, pypi): with PipenvInstance(chdir=True, pypi=pypi) as p: with open('__init__.py', 'w') as f: contents = """ import tablib import records +import flask """.strip() f.write(contents) p.pipenv('install requests') p.pipenv('install tablib') - p.pipenv('install records') + p.pipenv('install flask') - assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'tablib', 'records']) + assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'tablib', 'flask']) c = p.pipenv('check --unused .') assert 'tablib' not in c.out + assert 'flask' not in c.out From 289eac386f7f2651db5fe27203c895e87b499dfc Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 01:27:42 -0400 Subject: [PATCH 31/81] Update vendored dependencies Signed-off-by: Dan Ryan --- news/3298.vendor.rst | 24 +++++++----- pipenv/vendor/vendor.txt | 30 +++++++-------- .../patches/vendor/vistir-imports.patch | 38 ++++++++++--------- 3 files changed, 50 insertions(+), 42 deletions(-) diff --git a/news/3298.vendor.rst b/news/3298.vendor.rst index 40f9b5104e..cab9a50bfb 100644 --- a/news/3298.vendor.rst +++ b/news/3298.vendor.rst @@ -1,28 +1,34 @@ Updated vendored dependencies: - **attrs**: ``18.2.0`` => ``19.1.0`` - - **certifi**: ``2018.10.15`` => ``2018.11.29`` + - **certifi**: ``2018.10.15`` => ``2019.3.9`` - **cached_property**: ``1.4.3`` => ``1.5.1`` + - **cerberus**: ``1.2.0`` => ``1.3.1`` + - **click-completion**: ``0.5.0`` => ``0.5.1`` - **colorama**: ``0.3.9`` => ``0.4.1`` + - **distlib**: ``0.2.8`` => ``0.2.9`` - **idna**: ``2.7`` => ``2.8`` + - **jinja2**: ``2.10.0`` 
=> ``2.10.1`` - **markupsafe**: ``1.0`` => ``1.1.1`` - **orderedmultidict**: ``(new)`` => ``1.0`` - **packaging**: ``18.0`` => ``19.0`` - - **parse**: ``1.9.0`` => ``1.11.1`` + - **parse**: ``1.9.0`` => ``1.12.0`` - **pathlib2**: ``2.3.2`` => ``2.3.3`` - **pep517**: ``(new)`` => ``0.5.0`` + - **pexpect**: ``4.6.0`` => ``4.7.0`` - **pipdeptree**: ``0.13.0`` => ``0.13.2`` - **pyparsing**: ``2.2.2`` => ``2.3.1`` - - **python-dotenv**: ``0.9.1`` => ``0.10.1`` - - **pythonfinder**: ``1.1.10`` => ``1.2.0`` + - **python-dotenv**: ``0.9.1`` => ``0.10.2`` + - **pythonfinder**: ``1.1.10`` => ``1.2.1`` - **pytoml**: ``(new)`` => ``0.1.20`` - **requests**: ``2.20.1`` => ``2.21.0`` - - **requirementslib**: ``1.3.3`` => ``1.4.2`` - - **shellingham**: ``1.2.7`` => ``1.2.8`` + - **requirementslib**: ``1.3.3`` => ``1.5.0`` + - **scandir**: ``1.9.0`` => ``1.10.0`` + - **shellingham**: ``1.2.7`` => ``1.3.1`` - **six**: ``1.11.0`` => ``1.12.0`` - **tomlkit**: ``0.5.2`` => ``0.5.3`` - - **urllib3**: ``1.24`` => ``1.24.1`` - - **vistir**: ``0.3.0`` => ``0.3.1`` - - **yaspin**: ``0.14.0`` => ``0.14.1`` + - **urllib3**: ``1.24`` => ``1.25.2`` + - **vistir**: ``0.3.0`` => ``0.4.1`` + - **yaspin**: ``0.14.0`` => ``0.14.3`` - Removed vendored dependency **cursor**. diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index f1fe99c061..c5ce4c3f6f 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -3,48 +3,48 @@ backports.shutil_get_terminal_size==1.0.0 backports.weakref==1.0.post1 blindspin==2.0.1 click==7.0 -click-completion==0.5.0 +click-completion==0.5.1 click-didyoumean==0.0.3 colorama==0.4.1 delegator.py==0.1.1 - pexpect==4.6.0 + pexpect==4.7.0 ptyprocess==0.6.0 -python-dotenv==0.10.1 +python-dotenv==0.10.2 first==2.0.1 iso8601==0.1.12 -jinja2==2.10 +jinja2==2.10. 
markupsafe==1.1.1 -parse==1.11.1 +parse==1.12.0 pathlib2==2.3.3 - scandir==1.9 + scandir==1.10 pipdeptree==0.13.2 pipreqs==0.4.9 docopt==0.6.2 yarg==0.1.9 -pythonfinder==1.2.0 +pythonfinder==1.2.1 requests==2.21.0 chardet==3.0.4 idna==2.8 - urllib3==1.24.1 - certifi==2018.11.29 -requirementslib==1.4.2 + urllib3==1.25.2 + certifi==2019.3.9 +requirementslib==1.5.0 attrs==19.1.0 - distlib==0.2.8 + distlib==0.2.9 packaging==19.0 pyparsing==2.3.1 git+https://github.com/sarugaku/plette.git@master#egg=plette tomlkit==0.5.3 -shellingham==1.2.8 +shellingham==1.3.1 six==1.12.0 semver==2.8.1 shutilwhich==1.1.0 toml==0.10.0 cached-property==1.5.1 -vistir==0.3.1 +vistir==0.4.1 pip-shims==0.3.2 enum34==1.1.6 -yaspin==0.14.1 -cerberus==1.2 +yaspin==0.14.3 +cerberus==1.3.1 resolvelib==0.2.2 backports.functools_lru_cache==1.5 pep517==0.5.0 diff --git a/tasks/vendoring/patches/vendor/vistir-imports.patch b/tasks/vendoring/patches/vendor/vistir-imports.patch index d1dc4e3a61..725e8a56d9 100644 --- a/tasks/vendoring/patches/vendor/vistir-imports.patch +++ b/tasks/vendoring/patches/vendor/vistir-imports.patch @@ -1,8 +1,8 @@ diff --git a/pipenv/vendor/vistir/backports/tempfile.py b/pipenv/vendor/vistir/backports/tempfile.py -index 483a479a..43470a6e 100644 +index f5594a2d..a3d7f3df 100644 --- a/pipenv/vendor/vistir/backports/tempfile.py +++ b/pipenv/vendor/vistir/backports/tempfile.py -@@ -13,7 +13,7 @@ import six +@@ -12,7 +12,7 @@ import six try: from weakref import finalize except ImportError: @@ -10,33 +10,35 @@ index 483a479a..43470a6e 100644 + from pipenv.vendor.backports.weakref import finalize - __all__ = ["finalize", "NamedTemporaryFile"] + def fs_encode(path): diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py -index 9ae33fdc..ec3b65cb 100644 +index b5904bc7..a44aafbe 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py -@@ -43,12 +43,12 @@ if sys.version_info >= (3, 5): - from functools import lru_cache - else: +@@ -43,7 +43,7 @@ __all__ = [ + if sys.version_info >= (3, 5): # pragma: no cover + from pathlib import Path + else: # pragma: no cover - from pathlib2 import Path -- from backports.functools_lru_cache import lru_cache + from pipenv.vendor.pathlib2 import Path -+ from pipenv.vendor.backports.functools_lru_cache import lru_cache - - if sys.version_info < (3, 3): + if six.PY3: # pragma: no cover + # Only Python 3.4+ is supported +@@ -53,14 +53,14 @@ if six.PY3: # pragma: no cover + from weakref import finalize + else: # pragma: no cover + # Only Python 2.7 is supported +- from backports.functools_lru_cache import lru_cache ++ from pipenv.vendor.backports.functools_lru_cache import lru_cache + from .backports.functools import partialmethod # type: ignore - from backports.shutil_get_terminal_size import get_terminal_size + from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size + from .backports.surrogateescape import register_surrogateescape + register_surrogateescape() NamedTemporaryFile = _NamedTemporaryFile - else: - from tempfile import NamedTemporaryFile -@@ -57,7 +57,7 @@ else: - try: - from weakref import finalize - except ImportError: - from backports.weakref import finalize # type: ignore + from pipenv.vendor.backports.weakref import finalize # type: ignore try: - from functools import partialmethod + # Introduced Python 3.5 From 916c53e37964fafad2ec18f87e6310ddb56d25b2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 10:42:09 -0400 Subject: [PATCH 32/81] Update all vendored dependencies 
Signed-off-by: Dan Ryan --- pipenv/patched/notpip/_internal/resolve.py | 2 +- pipenv/patched/piptools/repositories/pypi.py | 7 +- pipenv/vendor/backports/__init__.py | 2 +- pipenv/vendor/cerberus/__init__.py | 15 +- pipenv/vendor/cerberus/errors.py | 204 +-- pipenv/vendor/cerberus/platform.py | 26 + pipenv/vendor/cerberus/schema.py | 293 ++-- pipenv/vendor/cerberus/tests/__init__.py | 43 +- pipenv/vendor/cerberus/tests/conftest.py | 99 +- pipenv/vendor/cerberus/tests/test_assorted.py | 63 +- .../cerberus/tests/test_customization.py | 43 +- pipenv/vendor/cerberus/tests/test_errors.py | 207 ++- .../cerberus/tests/test_normalization.py | 368 +++-- .../vendor/cerberus/tests/test_registries.py | 24 +- pipenv/vendor/cerberus/tests/test_schema.py | 117 +- pipenv/vendor/cerberus/tests/test_utils.py | 11 + .../vendor/cerberus/tests/test_validation.py | 1283 +++++++++++------ pipenv/vendor/cerberus/utils.py | 59 +- pipenv/vendor/cerberus/validator.py | 806 +++++++---- pipenv/vendor/certifi/__init__.py | 2 +- pipenv/vendor/certifi/cacert.pem | 146 ++ pipenv/vendor/certifi/core.py | 5 - pipenv/vendor/click_completion/__init__.py | 2 +- pipenv/vendor/click_completion/core.py | 20 +- pipenv/vendor/click_completion/zsh.j2 | 1 - pipenv/vendor/distlib/__init__.py | 2 +- pipenv/vendor/distlib/index.py | 2 +- pipenv/vendor/distlib/locators.py | 6 +- pipenv/vendor/distlib/metadata.py | 8 +- pipenv/vendor/distlib/scripts.py | 26 +- pipenv/vendor/distlib/util.py | 6 +- pipenv/vendor/distlib/wheel.py | 30 +- pipenv/vendor/dotenv/__init__.py | 3 + pipenv/vendor/dotenv/cli.py | 9 +- pipenv/vendor/dotenv/compat.py | 11 +- pipenv/vendor/dotenv/environ.py | 54 - pipenv/vendor/dotenv/ipython.py | 6 +- pipenv/vendor/dotenv/main.py | 114 +- pipenv/vendor/dotenv/py.typed | 1 + pipenv/vendor/dotenv/version.py | 2 +- pipenv/vendor/parse.py | 7 +- pipenv/vendor/pexpect/__init__.py | 2 +- pipenv/vendor/pexpect/_async.py | 26 +- pipenv/vendor/pexpect/expect.py | 2 +- pipenv/vendor/pexpect/pty_spawn.py | 102 +- pipenv/vendor/pexpect/pxssh.py | 78 +- pipenv/vendor/pexpect/replwrap.py | 14 +- .../pythonfinder/_vendor/pep514tools/LICENSE | 21 + .../_vendor/pep514tools/_registry.py | 2 +- pipenv/vendor/pythonfinder/_vendor/vendor.txt | 2 +- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/utils.py | 2 +- pipenv/vendor/scandir.py | 4 +- pipenv/vendor/shellingham/__init__.py | 2 +- pipenv/vendor/shellingham/_core.py | 2 +- pipenv/vendor/shellingham/posix.py | 2 +- pipenv/vendor/shellingham/posix/_default.py | 27 - pipenv/vendor/shellingham/posix/_proc.py | 34 +- pipenv/vendor/shellingham/posix/_ps.py | 4 +- pipenv/vendor/shellingham/posix/linux.py | 35 - pipenv/vendor/urllib3/LICENSE.txt | 32 +- pipenv/vendor/urllib3/__init__.py | 3 +- pipenv/vendor/urllib3/connection.py | 38 +- pipenv/vendor/urllib3/connectionpool.py | 41 +- .../contrib/_securetransport/bindings.py | 14 +- pipenv/vendor/urllib3/contrib/pyopenssl.py | 35 +- .../vendor/urllib3/contrib/securetransport.py | 87 +- pipenv/vendor/urllib3/contrib/socks.py | 35 +- pipenv/vendor/urllib3/fields.py | 140 +- .../urllib3/packages/rfc3986/__init__.py | 56 + .../vendor/urllib3/packages/rfc3986/_mixin.py | 353 +++++ .../urllib3/packages/rfc3986/abnf_regexp.py | 267 ++++ pipenv/vendor/urllib3/packages/rfc3986/api.py | 106 ++ .../urllib3/packages/rfc3986/builder.py | 298 ++++ .../vendor/urllib3/packages/rfc3986/compat.py | 54 + .../urllib3/packages/rfc3986/exceptions.py | 118 ++ pipenv/vendor/urllib3/packages/rfc3986/iri.py | 147 ++ 
.../vendor/urllib3/packages/rfc3986/misc.py | 146 ++ .../urllib3/packages/rfc3986/normalizers.py | 167 +++ .../urllib3/packages/rfc3986/parseresult.py | 385 +++++ pipenv/vendor/urllib3/packages/rfc3986/uri.py | 153 ++ .../urllib3/packages/rfc3986/validators.py | 450 ++++++ pipenv/vendor/urllib3/poolmanager.py | 13 +- pipenv/vendor/urllib3/response.py | 69 +- pipenv/vendor/urllib3/util/__init__.py | 2 + pipenv/vendor/urllib3/util/request.py | 7 + pipenv/vendor/urllib3/util/retry.py | 3 +- pipenv/vendor/urllib3/util/ssl_.py | 111 +- pipenv/vendor/urllib3/util/timeout.py | 3 +- pipenv/vendor/urllib3/util/url.py | 215 ++- pipenv/vendor/vistir/backports/__init__.py | 3 +- .../vistir/backports/surrogateescape.py | 196 +++ pipenv/vendor/vistir/backports/tempfile.py | 20 +- pipenv/vendor/vistir/compat.py | 36 +- pipenv/vendor/vistir/environment.py | 6 + pipenv/vendor/vistir/misc.py | 91 +- pipenv/vendor/vistir/path.py | 28 +- pipenv/vendor/vistir/spin.py | 14 +- pipenv/vendor/yaspin/__version__.py | 2 +- pipenv/vendor/yaspin/core.py | 56 +- 100 files changed, 6443 insertions(+), 2055 deletions(-) create mode 100644 pipenv/vendor/cerberus/tests/test_utils.py delete mode 100644 pipenv/vendor/dotenv/environ.py create mode 100644 pipenv/vendor/dotenv/py.typed create mode 100644 pipenv/vendor/pythonfinder/_vendor/pep514tools/LICENSE delete mode 100644 pipenv/vendor/shellingham/posix/_default.py delete mode 100644 pipenv/vendor/shellingham/posix/linux.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/__init__.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/_mixin.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/abnf_regexp.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/api.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/builder.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/compat.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/exceptions.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/iri.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/misc.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/normalizers.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/parseresult.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/uri.py create mode 100644 pipenv/vendor/urllib3/packages/rfc3986/validators.py create mode 100644 pipenv/vendor/vistir/backports/surrogateescape.py create mode 100644 pipenv/vendor/vistir/environment.py diff --git a/pipenv/patched/notpip/_internal/resolve.py b/pipenv/patched/notpip/_internal/resolve.py index 36945de58f..e42dd3d4ef 100644 --- a/pipenv/patched/notpip/_internal/resolve.py +++ b/pipenv/patched/notpip/_internal/resolve.py @@ -19,8 +19,8 @@ UnsupportedPythonVersion, ) from pipenv.patched.notpip._internal.req.constructors import install_req_from_req_string -from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.req.req_install import InstallRequirement +from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.misc import dist_in_usersite, ensure_dir from pipenv.patched.notpip._internal.utils.packaging import check_dist_requires_python from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index 8dd369acb9..4e44b90348 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ 
b/pipenv/patched/piptools/repositories/pypi.py @@ -14,7 +14,7 @@ from packaging.specifiers import SpecifierSet, Specifier os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") -from pip_shims.shims import VcsSupport, WheelCache, InstallationError, pip_version +from pip_shims.shims import VcsSupport, WheelCache, InstallationError from pip_shims.shims import Resolver as PipResolver @@ -112,7 +112,7 @@ def __init__(self, pip_options, session, build_isolation=False, use_json=False): } # pip 19.0 has removed process_dependency_links from the PackageFinder constructor - if pkg_resources.parse_version(pip_version) < pkg_resources.parse_version('19.0'): + if pkg_resources.parse_version(pip_shims.shims.pip_version) < pkg_resources.parse_version('19.0'): finder_kwargs["process_dependency_links"] = pip_options.process_dependency_links self.finder = PackageFinder(**finder_kwargs) @@ -279,7 +279,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache): 'finder': self.finder, 'session': self.session, 'upgrade_strategy': "to-satisfy-only", - 'force_reinstall': True, + 'force_reinstall': False, 'ignore_dependencies': False, 'ignore_requires_python': True, 'ignore_installed': True, @@ -309,6 +309,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache): cleanup_fn() except OSError: pass + results = set(results) if results else set() return results, ireq diff --git a/pipenv/vendor/backports/__init__.py b/pipenv/vendor/backports/__init__.py index 0c64b4c10b..e449e521e1 100644 --- a/pipenv/vendor/backports/__init__.py +++ b/pipenv/vendor/backports/__init__.py @@ -1,5 +1,5 @@ __path__ = __import__('pkgutil').extend_path(__path__, __name__) from . import weakref -from . import enum from . import shutil_get_terminal_size +from . import enum from . import functools_lru_cache diff --git a/pipenv/vendor/cerberus/__init__.py b/pipenv/vendor/cerberus/__init__.py index 4b528cffd6..1e0f0d5488 100644 --- a/pipenv/vendor/cerberus/__init__.py +++ b/pipenv/vendor/cerberus/__init__.py @@ -10,20 +10,23 @@ from __future__ import absolute_import +from pkg_resources import get_distribution, DistributionNotFound + from cerberus.validator import DocumentError, Validator -from cerberus.schema import (rules_set_registry, schema_registry, Registry, - SchemaError) +from cerberus.schema import rules_set_registry, schema_registry, SchemaError from cerberus.utils import TypeDefinition -__version__ = "1.2" +try: + __version__ = get_distribution("Cerberus").version +except DistributionNotFound: + __version__ = "unknown" __all__ = [ DocumentError.__name__, - Registry.__name__, SchemaError.__name__, TypeDefinition.__name__, Validator.__name__, - 'schema_registry', - 'rules_set_registry' + "schema_registry", + "rules_set_registry", ] diff --git a/pipenv/vendor/cerberus/errors.py b/pipenv/vendor/cerberus/errors.py index 4c497eebc4..14e27eb84b 100644 --- a/pipenv/vendor/cerberus/errors.py +++ b/pipenv/vendor/cerberus/errors.py @@ -3,12 +3,12 @@ from __future__ import absolute_import -from collections import defaultdict, namedtuple, MutableMapping +from collections import defaultdict, namedtuple from copy import copy, deepcopy from functools import wraps from pprint import pformat -from cerberus.platform import PYTHON_VERSION +from cerberus.platform import PYTHON_VERSION, MutableMapping from cerberus.utils import compare_paths_lt, quote_string @@ -54,6 +54,7 @@ UNALLOWED_VALUES = ErrorDefinition(0x45, 'allowed') FORBIDDEN_VALUE = ErrorDefinition(0x46, 'forbidden') FORBIDDEN_VALUES = ErrorDefinition(0x47, 'forbidden') 
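A note on the cerberus __init__.py hunk above: the hard-coded `__version__ = "1.2"` is replaced by a runtime lookup of the installed distribution's metadata. A minimal sketch of the pattern, assuming setuptools' pkg_resources is importable; note that the name passed to get_distribution is the installed project name ("Cerberus"), not the import name:

    from pkg_resources import DistributionNotFound, get_distribution

    try:
        # Read the version recorded in the installed package metadata.
        __version__ = get_distribution("Cerberus").version
    except DistributionNotFound:
        # Imported from a source checkout that was never installed, so no
        # distribution metadata is available.
        __version__ = "unknown"

The fallback branch is exercised by the new test_version_not_found test further down, which monkeypatches get_distribution to raise DistributionNotFound and reloads the package.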
+MISSING_MEMBERS = ErrorDefinition(0x48, 'contains') # other NORMALIZATION = ErrorDefinition(0x60, None) @@ -66,9 +67,10 @@ ERROR_GROUP = ErrorDefinition(0x80, None) MAPPING_SCHEMA = ErrorDefinition(0x81, 'schema') SEQUENCE_SCHEMA = ErrorDefinition(0x82, 'schema') -KEYSCHEMA = ErrorDefinition(0x83, 'keyschema') -VALUESCHEMA = ErrorDefinition(0x84, 'valueschema') -BAD_ITEMS = ErrorDefinition(0x8f, 'items') +# TODO remove KEYSCHEMA AND VALUESCHEMA with next major release +KEYSRULES = KEYSCHEMA = ErrorDefinition(0x83, 'keysrules') +VALUESRULES = VALUESCHEMA = ErrorDefinition(0x84, 'valuesrules') +BAD_ITEMS = ErrorDefinition(0x8F, 'items') LOGICAL = ErrorDefinition(0x90, None) NONEOF = ErrorDefinition(0x91, 'noneof') @@ -79,8 +81,7 @@ """ SchemaError messages """ -SCHEMA_ERROR_DEFINITION_TYPE = \ - "schema definition for field '{0}' must be a dict" +SCHEMA_ERROR_DEFINITION_TYPE = "schema definition for field '{0}' must be a dict" SCHEMA_ERROR_MISSING = "validation schema missing" @@ -89,8 +90,8 @@ class ValidationError(object): """ A simple class to store and query basic error information. """ - def __init__(self, document_path, schema_path, code, rule, constraint, - value, info): + + def __init__(self, document_path, schema_path, code, rule, constraint, value, info): self.document_path = document_path """ The path to the field within the document that caused the error. Type: :class:`tuple` """ @@ -115,8 +116,7 @@ def __eq__(self, other): def __hash__(self): """ Expects that all other properties are transitively determined. """ - return hash(self.document_path) ^ hash(self.schema_path) \ - ^ hash(self.code) + return hash(self.document_path) ^ hash(self.schema_path) ^ hash(self.code) def __lt__(self, other): if self.document_path != other.document_path: @@ -125,20 +125,24 @@ def __lt__(self, other): return compare_paths_lt(self.schema_path, other.schema_path) def __repr__(self): - return "{class_name} @ {memptr} ( " \ - "document_path={document_path}," \ - "schema_path={schema_path}," \ - "code={code}," \ - "constraint={constraint}," \ - "value={value}," \ - "info={info} )"\ - .format(class_name=self.__class__.__name__, memptr=hex(id(self)), # noqa: E501 - document_path=self.document_path, - schema_path=self.schema_path, - code=hex(self.code), - constraint=quote_string(self.constraint), - value=quote_string(self.value), - info=self.info) + return ( + "{class_name} @ {memptr} ( " + "document_path={document_path}," + "schema_path={schema_path}," + "code={code}," + "constraint={constraint}," + "value={value}," + "info={info} )".format( + class_name=self.__class__.__name__, + memptr=hex(id(self)), # noqa: E501 + document_path=self.document_path, + schema_path=self.schema_path, + code=hex(self.code), + constraint=quote_string(self.constraint), + value=quote_string(self.value), + info=self.info, + ) + ) @property def child_errors(self): @@ -190,11 +194,13 @@ class ErrorList(list): """ A list for :class:`~cerberus.errors.ValidationError` instances that can be queried with the ``in`` keyword for a particular :class:`~cerberus.errors.ErrorDefinition`. 
""" + def __contains__(self, error_definition): - for code in (x.code for x in self): - if code == error_definition.code: - return True - return False + if not isinstance(error_definition, ErrorDefinition): + raise TypeError + + wanted_code = error_definition.code + return any(x.code == wanted_code for x in self) class ErrorTreeNode(MutableMapping): @@ -203,14 +209,10 @@ class ErrorTreeNode(MutableMapping): def __init__(self, path, parent_node): self.parent_node = parent_node self.tree_root = self.parent_node.tree_root - self.path = path[:self.parent_node.depth + 1] + self.path = path[: self.parent_node.depth + 1] self.errors = ErrorList() self.descendants = {} - def __add__(self, error): - self.add(error) - return self - def __contains__(self, item): if isinstance(item, ErrorDefinition): return item in self.errors @@ -228,6 +230,7 @@ def __getitem__(self, item): for error in self.errors: if item.code == error.code: return error + return None else: return self.descendants.get(item) @@ -258,14 +261,16 @@ def add(self, error): if key not in self.descendants: self[key] = ErrorTreeNode(error_path, self) + node = self[key] + if len(error_path) == self.depth + 1: - self[key].errors.append(error) - self[key].errors.sort() + node.errors.append(error) + node.errors.sort() if error.is_group_error: for child_error in error.child_errors: - self.tree_root += child_error + self.tree_root.add(child_error) else: - self[key] += error + node.add(error) def _path_of_(self, error): return getattr(error, self.tree_type + '_path') @@ -274,14 +279,15 @@ def _path_of_(self, error): class ErrorTree(ErrorTreeNode): """ Base class for :class:`~cerberus.errors.DocumentErrorTree` and :class:`~cerberus.errors.SchemaErrorTree`. """ - def __init__(self, errors=[]): + + def __init__(self, errors=()): self.parent_node = None self.tree_root = self self.path = () self.errors = ErrorList() self.descendants = {} for error in errors: - self += error + self.add(error) def add(self, error): """ Add an error to the tree. @@ -323,18 +329,21 @@ def fetch_node_from(self, path): class DocumentErrorTree(ErrorTree): """ Implements a dict-like class to query errors by indexes following the structure of a validated document. """ + tree_type = 'document' class SchemaErrorTree(ErrorTree): """ Implements a dict-like class to query errors by indexes following the structure of the used schema. """ + tree_type = 'schema' class BaseErrorHandler(object): """ Base class for all error handlers. Subclasses are identified as error-handlers with an instance-test. """ + def __init__(self, *args, **kwargs): """ Optionally initialize a new instance. """ pass @@ -411,9 +420,9 @@ def encode_unicode(f): This decorator ensures that if legacy Python is used unicode strings are encoded before passing to a function. """ + @wraps(f) def wrapped(obj, error): - def _encode(value): """Helper encoding unicode strings into binary utf-8""" if isinstance(value, unicode): # noqa: F821 @@ -436,56 +445,52 @@ class BasicErrorHandler(BaseErrorHandler): through :class:`str` a pretty-formatted representation of that tree is returned. 
""" - messages = {0x00: "{0}", - - 0x01: "document is missing", - 0x02: "required field", - 0x03: "unknown field", - 0x04: "field '{0}' is required", - 0x05: "depends on these values: {constraint}", - 0x06: "{0} must not be present with '{field}'", - - 0x21: "'{0}' is not a document, must be a dict", - 0x22: "empty values not allowed", - 0x23: "null value not allowed", - 0x24: "must be of {constraint} type", - 0x25: "must be of dict type", - 0x26: "length of list should be {constraint}, it is {0}", - 0x27: "min length is {constraint}", - 0x28: "max length is {constraint}", - - 0x41: "value does not match regex '{constraint}'", - 0x42: "min value is {constraint}", - 0x43: "max value is {constraint}", - 0x44: "unallowed value {value}", - 0x45: "unallowed values {0}", - 0x46: "unallowed value {value}", - 0x47: "unallowed values {0}", - - 0x61: "field '{field}' cannot be coerced: {0}", - 0x62: "field '{field}' cannot be renamed: {0}", - 0x63: "field is read-only", - 0x64: "default value for '{field}' cannot be set: {0}", - - 0x81: "mapping doesn't validate subschema: {0}", - 0x82: "one or more sequence-items don't validate: {0}", - 0x83: "one or more keys of a mapping don't validate: {0}", - 0x84: "one or more values in a mapping don't validate: {0}", - 0x85: "one or more sequence-items don't validate: {0}", - - 0x91: "one or more definitions validate", - 0x92: "none or more than one rule validate", - 0x93: "no definitions validate", - 0x94: "one or more definitions don't validate" - } + + messages = { + 0x00: "{0}", + 0x01: "document is missing", + 0x02: "required field", + 0x03: "unknown field", + 0x04: "field '{0}' is required", + 0x05: "depends on these values: {constraint}", + 0x06: "{0} must not be present with '{field}'", + 0x21: "'{0}' is not a document, must be a dict", + 0x22: "empty values not allowed", + 0x23: "null value not allowed", + 0x24: "must be of {constraint} type", + 0x25: "must be of dict type", + 0x26: "length of list should be {constraint}, it is {0}", + 0x27: "min length is {constraint}", + 0x28: "max length is {constraint}", + 0x41: "value does not match regex '{constraint}'", + 0x42: "min value is {constraint}", + 0x43: "max value is {constraint}", + 0x44: "unallowed value {value}", + 0x45: "unallowed values {0}", + 0x46: "unallowed value {value}", + 0x47: "unallowed values {0}", + 0x48: "missing members {0}", + 0x61: "field '{field}' cannot be coerced: {0}", + 0x62: "field '{field}' cannot be renamed: {0}", + 0x63: "field is read-only", + 0x64: "default value for '{field}' cannot be set: {0}", + 0x81: "mapping doesn't validate subschema: {0}", + 0x82: "one or more sequence-items don't validate: {0}", + 0x83: "one or more keys of a mapping don't validate: {0}", + 0x84: "one or more values in a mapping don't validate: {0}", + 0x85: "one or more sequence-items don't validate: {0}", + 0x91: "one or more definitions validate", + 0x92: "none or more than one rule validate", + 0x93: "no definitions validate", + 0x94: "one or more definitions don't validate", + } def __init__(self, tree=None): self.tree = {} if tree is None else tree - def __call__(self, errors=None): - if errors is not None: - self.clear() - self.extend(errors) + def __call__(self, errors): + self.clear() + self.extend(errors) return self.pretty_tree def __str__(self): @@ -511,8 +516,9 @@ def add(self, error): elif error.is_group_error: self._insert_group_error(error) elif error.code in self.messages: - self._insert_error(error.document_path, - self._format_message(error.field, error)) + 
self._insert_error( + error.document_path, self._format_message(error.field, error) + ) def clear(self): self.tree = {} @@ -522,8 +528,8 @@ def start(self, validator): def _format_message(self, field, error): return self.messages[error.code].format( - *error.info, constraint=error.constraint, - field=field, value=error.value) + *error.info, constraint=error.constraint, field=field, value=error.value + ) def _insert_error(self, path, node): """ Adds an error or sub-tree to :attr:tree. @@ -559,14 +565,14 @@ def _insert_group_error(self, error): elif child_error.is_group_error: self._insert_group_error(child_error) else: - self._insert_error(child_error.document_path, - self._format_message(child_error.field, - child_error)) + self._insert_error( + child_error.document_path, + self._format_message(child_error.field, child_error), + ) def _insert_logic_error(self, error): field = error.field - self._insert_error(error.document_path, - self._format_message(field, error)) + self._insert_error(error.document_path, self._format_message(field, error)) for definition_errors in error.definitions_errors.values(): for child_error in definition_errors: @@ -575,8 +581,10 @@ def _insert_logic_error(self, error): elif child_error.is_group_error: self._insert_group_error(child_error) else: - self._insert_error(child_error.document_path, - self._format_message(field, child_error)) + self._insert_error( + child_error.document_path, + self._format_message(field, child_error), + ) def _purge_empty_dicts(self, error_list): subtree = error_list[-1] diff --git a/pipenv/vendor/cerberus/platform.py b/pipenv/vendor/cerberus/platform.py index eca9858d5e..66b1d5f0dc 100644 --- a/pipenv/vendor/cerberus/platform.py +++ b/pipenv/vendor/cerberus/platform.py @@ -12,3 +12,29 @@ else: _str_type = str _int_types = (int,) + + +if PYTHON_VERSION < 3.3: + from collections import ( # noqa: F401 + Callable, + Container, + Hashable, + Iterable, + Mapping, + MutableMapping, + Sequence, + Set, + Sized, + ) +else: + from collections.abc import ( # noqa: F401 + Callable, + Container, + Hashable, + Iterable, + Mapping, + MutableMapping, + Sequence, + Set, + Sized, + ) diff --git a/pipenv/vendor/cerberus/schema.py b/pipenv/vendor/cerberus/schema.py index 3ddce17241..305e59ff56 100644 --- a/pipenv/vendor/cerberus/schema.py +++ b/pipenv/vendor/cerberus/schema.py @@ -1,13 +1,23 @@ from __future__ import absolute_import -from collections import (Callable, Hashable, Iterable, Mapping, - MutableMapping, Sequence) from copy import copy +from warnings import warn from cerberus import errors -from cerberus.platform import _str_type -from cerberus.utils import (get_Validator_class, validator_factory, - mapping_hash, TypeDefinition) +from cerberus.platform import ( + _str_type, + Callable, + Hashable, + Mapping, + MutableMapping, + Sequence, +) +from cerberus.utils import ( + get_Validator_class, + validator_factory, + mapping_hash, + TypeDefinition, +) class _Abort(Exception): @@ -17,6 +27,7 @@ class _Abort(Exception): class SchemaError(Exception): """ Raised when the validation schema is missing, has the wrong format or contains errors. 
""" + pass @@ -26,18 +37,19 @@ class DefinitionSchema(MutableMapping): def __new__(cls, *args, **kwargs): if 'SchemaValidator' not in globals(): global SchemaValidator - SchemaValidator = validator_factory('SchemaValidator', - SchemaValidatorMixin) + SchemaValidator = validator_factory('SchemaValidator', SchemaValidatorMixin) types_mapping = SchemaValidator.types_mapping.copy() - types_mapping.update({ - 'callable': TypeDefinition('callable', (Callable,), ()), - 'hashable': TypeDefinition('hashable', (Hashable,), ()) - }) + types_mapping.update( + { + 'callable': TypeDefinition('callable', (Callable,), ()), + 'hashable': TypeDefinition('hashable', (Hashable,), ()), + } + ) SchemaValidator.types_mapping = types_mapping return super(DefinitionSchema, cls).__new__(cls) - def __init__(self, validator, schema={}): + def __init__(self, validator, schema): """ :param validator: An instance of Validator-(sub-)class that uses this schema. @@ -45,8 +57,7 @@ def __init__(self, validator, schema={}): one. """ if not isinstance(validator, get_Validator_class()): - raise RuntimeError('validator argument must be a Validator-' - 'instance.') + raise RuntimeError('validator argument must be a Validator-' 'instance.') self.validator = validator if isinstance(schema, _str_type): @@ -56,14 +67,16 @@ def __init__(self, validator, schema={}): try: schema = dict(schema) except Exception: - raise SchemaError( - errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema)) + raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema)) self.validation_schema = SchemaValidationSchema(validator) self.schema_validator = SchemaValidator( - None, allow_unknown=self.validation_schema, + None, + allow_unknown=self.validation_schema, error_handler=errors.SchemaErrorHandler, - target_schema=schema, target_validator=validator) + target_schema=schema, + target_validator=validator, + ) schema = self.expand(schema) self.validate(schema) @@ -110,6 +123,10 @@ def expand(cls, schema): schema = cls._expand_subschemas(schema) except Exception: pass + + # TODO remove this with the next major release + schema = cls._rename_deprecated_rulenames(schema) + return schema @classmethod @@ -119,13 +136,15 @@ def _expand_logical_shortcuts(cls, schema): :param schema: The schema-definition to expand. :return: The expanded schema-definition. """ + def is_of_rule(x): - return isinstance(x, _str_type) and \ - x.startswith(('allof_', 'anyof_', 'noneof_', 'oneof_')) + return isinstance(x, _str_type) and x.startswith( + ('allof_', 'anyof_', 'noneof_', 'oneof_') + ) for field in schema: for of_rule in (x for x in schema[field] if is_of_rule(x)): - operator, rule = of_rule.split('_') + operator, rule = of_rule.split('_', 1) schema[field].update({operator: []}) for value in schema[field][of_rule]: schema[field][operator].append({rule: value}) @@ -135,15 +154,15 @@ def is_of_rule(x): @classmethod def _expand_subschemas(cls, schema): def has_schema_rule(): - return isinstance(schema[field], Mapping) and \ - 'schema' in schema[field] + return isinstance(schema[field], Mapping) and 'schema' in schema[field] def has_mapping_schema(): """ Tries to determine heuristically if the schema-constraints are aimed to mappings. 
""" try: - return all(isinstance(x, Mapping) for x - in schema[field]['schema'].values()) + return all( + isinstance(x, Mapping) for x in schema[field]['schema'].values() + ) except TypeError: return False @@ -153,13 +172,12 @@ def has_mapping_schema(): elif has_mapping_schema(): schema[field]['schema'] = cls.expand(schema[field]['schema']) else: # assumes schema-constraints for a sequence - schema[field]['schema'] = \ - cls.expand({0: schema[field]['schema']})[0] + schema[field]['schema'] = cls.expand({0: schema[field]['schema']})[0] - for rule in ('keyschema', 'valueschema'): + # TODO remove the last two values in the tuple with the next major release + for rule in ('keysrules', 'valuesrules', 'keyschema', 'valueschema'): if rule in schema[field]: - schema[field][rule] = \ - cls.expand({0: schema[field][rule]})[0] + schema[field][rule] = cls.expand({0: schema[field][rule]})[0] for rule in ('allof', 'anyof', 'items', 'noneof', 'oneof'): if rule in schema[field]: @@ -171,6 +189,12 @@ def has_mapping_schema(): schema[field][rule] = new_rules_definition return schema + def get(self, item, default=None): + return self.schema.get(item, default) + + def items(self): + return self.schema.items() + def update(self, schema): try: schema = self.expand(schema) @@ -178,31 +202,64 @@ def update(self, schema): _new_schema.update(schema) self.validate(_new_schema) except ValueError: - raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE - .format(schema)) + raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema)) except Exception as e: raise e else: self.schema = _new_schema + # TODO remove with next major release + @staticmethod + def _rename_deprecated_rulenames(schema): + for field, rules in schema.items(): + + if isinstance(rules, str): # registry reference + continue + + for old, new in ( + ('keyschema', 'keysrules'), + ('validator', 'check_with'), + ('valueschema', 'valuesrules'), + ): + + if old not in rules: + continue + + if new in rules: + raise RuntimeError( + "The rule '{new}' is also present with its old " + "name '{old}' in the same set of rules." + ) + + warn( + "The rule '{old}' was renamed to '{new}'. The old name will " + "not be available in the next major release of " + "Cerberus.".format(old=old, new=new), + DeprecationWarning, + ) + schema[field][new] = schema[field][old] + schema[field].pop(old) + + return schema + def regenerate_validation_schema(self): self.validation_schema = SchemaValidationSchema(self.validator) def validate(self, schema=None): + """ Validates a schema that defines rules against supported rules. + + :param schema: The schema to be validated as a legal cerberus schema + according to the rules of the assigned Validator object. + Raises a :class:`~cerberus.base.SchemaError` when an invalid + schema is encountered. """ if schema is None: schema = self.schema - _hash = (mapping_hash(schema), - mapping_hash(self.validator.types_mapping)) + _hash = (mapping_hash(schema), mapping_hash(self.validator.types_mapping)) if _hash not in self.validator._valid_schemas: self._validate(schema) self.validator._valid_schemas.add(_hash) def _validate(self, schema): - """ Validates a schema that defines rules against supported rules. - - :param schema: The schema to be validated as a legal cerberus schema - according to the rules of this Validator object. 
- """ if isinstance(schema, _str_type): schema = self.validator.schema_registry.get(schema, schema) @@ -212,8 +269,7 @@ def _validate(self, schema): schema = copy(schema) for field in schema: if isinstance(schema[field], _str_type): - schema[field] = rules_set_registry.get(schema[field], - schema[field]) + schema[field] = rules_set_registry.get(schema[field], schema[field]) if not self.schema_validator(schema, normalize=False): raise SchemaError(self.schema_validator.errors) @@ -236,31 +292,31 @@ def copy(self): class SchemaValidationSchema(UnvalidatedSchema): def __init__(self, validator): - self.schema = {'allow_unknown': False, - 'schema': validator.rules, - 'type': 'dict'} + self.schema = { + 'allow_unknown': False, + 'schema': validator.rules, + 'type': 'dict', + } class SchemaValidatorMixin(object): - """ This validator is extended to validate schemas passed to a Cerberus + """ This validator mixin provides mechanics to validate schemas passed to a Cerberus validator. """ + + def __init__(self, *args, **kwargs): + kwargs.setdefault('known_rules_set_refs', set()) + kwargs.setdefault('known_schema_refs', set()) + super(SchemaValidatorMixin, self).__init__(*args, **kwargs) + @property def known_rules_set_refs(self): """ The encountered references to rules set registry items. """ - return self._config.get('known_rules_set_refs', ()) - - @known_rules_set_refs.setter - def known_rules_set_refs(self, value): - self._config['known_rules_set_refs'] = value + return self._config['known_rules_set_refs'] @property def known_schema_refs(self): """ The encountered references to schema registry items. """ - return self._config.get('known_schema_refs', ()) - - @known_schema_refs.setter - def known_schema_refs(self, value): - self._config['known_schema_refs'] = value + return self._config['known_schema_refs'] @property def target_schema(self): @@ -272,35 +328,13 @@ def target_validator(self): """ The validator whose schema is being validated. """ return self._config['target_validator'] - def _validate_logical(self, rule, field, value): - """ {'allowed': ('allof', 'anyof', 'noneof', 'oneof')} """ - if not isinstance(value, Sequence): - self._error(field, errors.BAD_TYPE) - return - - validator = self._get_child_validator( - document_crumb=rule, allow_unknown=False, - schema=self.target_validator.validation_rules) - - for constraints in value: - _hash = (mapping_hash({'turing': constraints}), - mapping_hash(self.target_validator.types_mapping)) - if _hash in self.target_validator._valid_schemas: - continue - - validator(constraints, normalize=False) - if validator._errors: - self._error(validator._errors) - else: - self.target_validator._valid_schemas.add(_hash) - - def _validator_bulk_schema(self, field, value): + def _check_with_bulk_schema(self, field, value): # resolve schema registry reference if isinstance(value, _str_type): if value in self.known_rules_set_refs: return else: - self.known_rules_set_refs += (value,) + self.known_rules_set_refs.add(value) definition = self.target_validator.rules_set_registry.get(value) if definition is None: self._error(field, 'Rules set definition %s not found.' 
% value) @@ -308,28 +342,32 @@ def _validator_bulk_schema(self, field, value): else: value = definition - _hash = (mapping_hash({'turing': value}), - mapping_hash(self.target_validator.types_mapping)) + _hash = ( + mapping_hash({'turing': value}), + mapping_hash(self.target_validator.types_mapping), + ) if _hash in self.target_validator._valid_schemas: return validator = self._get_child_validator( - document_crumb=field, allow_unknown=False, - schema=self.target_validator.rules) + document_crumb=field, + allow_unknown=False, + schema=self.target_validator.rules, + ) validator(value, normalize=False) if validator._errors: self._error(validator._errors) else: self.target_validator._valid_schemas.add(_hash) - def _validator_dependencies(self, field, value): + def _check_with_dependencies(self, field, value): if isinstance(value, _str_type): pass elif isinstance(value, Mapping): validator = self._get_child_validator( document_crumb=field, - schema={'valueschema': {'type': 'list'}}, - allow_unknown=True + schema={'valuesrules': {'type': 'list'}}, + allow_unknown=True, ) if not validator(value, normalize=False): self._error(validator._errors) @@ -338,48 +376,53 @@ def _validator_dependencies(self, field, value): path = self.document_path + (field,) self._error(path, 'All dependencies must be a hashable type.') - def _validator_handler(self, field, value): - if isinstance(value, Callable): - return - if isinstance(value, _str_type): - if value not in self.target_validator.validators + \ - self.target_validator.coercers: - self._error(field, '%s is no valid coercer' % value) - elif isinstance(value, Iterable): - for handler in value: - self._validator_handler(field, handler) - - def _validator_items(self, field, value): + def _check_with_items(self, field, value): for i, schema in enumerate(value): - self._validator_bulk_schema((field, i), schema) + self._check_with_bulk_schema((field, i), schema) - def _validator_schema(self, field, value): + def _check_with_schema(self, field, value): try: value = self._handle_schema_reference_for_validator(field, value) except _Abort: return - _hash = (mapping_hash(value), - mapping_hash(self.target_validator.types_mapping)) + _hash = (mapping_hash(value), mapping_hash(self.target_validator.types_mapping)) if _hash in self.target_validator._valid_schemas: return validator = self._get_child_validator( - document_crumb=field, - schema=None, allow_unknown=self.root_allow_unknown) + document_crumb=field, schema=None, allow_unknown=self.root_allow_unknown + ) validator(self._expand_rules_set_refs(value), normalize=False) if validator._errors: self._error(validator._errors) else: self.target_validator._valid_schemas.add(_hash) + def _check_with_type(self, field, value): + value = set((value,)) if isinstance(value, _str_type) else set(value) + invalid_constraints = value - set(self.target_validator.types) + if invalid_constraints: + self._error( + field, 'Unsupported types: {}'.format(', '.join(invalid_constraints)) + ) + + def _expand_rules_set_refs(self, schema): + result = {} + for k, v in schema.items(): + if isinstance(v, _str_type): + result[k] = self.target_validator.rules_set_registry.get(v) + else: + result[k] = v + return result + def _handle_schema_reference_for_validator(self, field, value): if not isinstance(value, _str_type): return value if value in self.known_schema_refs: raise _Abort - self.known_schema_refs += (value,) + self.known_schema_refs.add(value) definition = self.target_validator.schema_registry.get(value) if definition is None: path = 
self.document_path + (field,) @@ -387,24 +430,32 @@ def _handle_schema_reference_for_validator(self, field, value): raise _Abort return definition - def _expand_rules_set_refs(self, schema): - result = {} - for k, v in schema.items(): - if isinstance(v, _str_type): - result[k] = self.target_validator.rules_set_registry.get(v) + def _validate_logical(self, rule, field, value): + """ {'allowed': ('allof', 'anyof', 'noneof', 'oneof')} """ + if not isinstance(value, Sequence): + self._error(field, errors.BAD_TYPE) + return + + validator = self._get_child_validator( + document_crumb=rule, + allow_unknown=False, + schema=self.target_validator.validation_rules, + ) + + for constraints in value: + _hash = ( + mapping_hash({'turing': constraints}), + mapping_hash(self.target_validator.types_mapping), + ) + if _hash in self.target_validator._valid_schemas: + continue + + validator(constraints, normalize=False) + if validator._errors: + self._error(validator._errors) else: - result[k] = v - return result + self.target_validator._valid_schemas.add(_hash) - def _validator_type(self, field, value): - value = (value,) if isinstance(value, _str_type) else value - invalid_constraints = () - for constraint in value: - if constraint not in self.target_validator.types: - invalid_constraints += (constraint,) - if invalid_constraints: - path = self.document_path + (field,) - self._error(path, 'Unsupported types: %s' % invalid_constraints) #### diff --git a/pipenv/vendor/cerberus/tests/__init__.py b/pipenv/vendor/cerberus/tests/__init__.py index cc1c27dcc2..c014f3b1c6 100644 --- a/pipenv/vendor/cerberus/tests/__init__.py +++ b/pipenv/vendor/cerberus/tests/__init__.py @@ -1,22 +1,23 @@ # -*- coding: utf-8 -*- +import re + import pytest from cerberus import errors, Validator, SchemaError, DocumentError from cerberus.tests.conftest import sample_schema -def assert_exception(exception, document={}, schema=None, validator=None, - msg=None): +def assert_exception(exception, document={}, schema=None, validator=None, msg=None): """ Tests whether a specific exception is raised. Optionally also tests whether the exception message is as expected. """ if validator is None: validator = Validator() if msg is None: - with pytest.raises(exception) as excinfo: + with pytest.raises(exception): validator(document, schema) else: - with pytest.raises(exception, message=msg) as excinfo: # noqa: F841 + with pytest.raises(exception, match=re.escape(msg)): validator(document, schema) @@ -32,8 +33,15 @@ def assert_document_error(*args): assert_exception(DocumentError, *args) -def assert_fail(document, schema=None, validator=None, update=False, - error=None, errors=None, child_errors=None): +def assert_fail( + document, + schema=None, + validator=None, + update=False, + error=None, + errors=None, + child_errors=None, +): """ Tests whether a validation fails. 
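One detail in the tests/__init__.py hunk above: `pytest.raises(exception, message=msg)` relied on a keyword that pytest later deprecated and removed (it only set the failure message shown when no exception was raised, and never checked the exception text), so assert_exception now uses `match=`, which does match against the string representation of the raised exception. Because `match` is interpreted as a regular expression, the literal expected message is passed through re.escape first. A standalone sketch of the pattern; the parse_port helper here is purely illustrative:

    import re

    import pytest


    def parse_port(value):
        port = int(value)
        if not 0 < port < 65536:
            raise ValueError("port out of range (1-65535)")
        return port


    def test_rejects_out_of_range_port():
        # re.escape keeps the parentheses and dash in the expected text
        # from being read as regex metacharacters.
        with pytest.raises(ValueError, match=re.escape("out of range (1-65535)")):
            parse_port("70000")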
""" if validator is None: validator = Validator(sample_schema) @@ -45,8 +53,7 @@ def assert_fail(document, schema=None, validator=None, update=False, assert not (error is not None and errors is not None) assert not (errors is not None and child_errors is not None), ( - 'child_errors can only be tested in ' - 'conjunction with the error parameter' + 'child_errors can only be tested in ' 'conjunction with the error parameter' ) assert not (child_errors is not None and error is None) if error is not None: @@ -99,7 +106,8 @@ def assert_has_error(_errors, d_path, s_path, error_def, constraint, info=()): else: break else: - raise AssertionError(""" + raise AssertionError( + """ Error with properties: document_path={doc_path} schema_path={schema_path} @@ -108,9 +116,15 @@ def assert_has_error(_errors, d_path, s_path, error_def, constraint, info=()): info={info} not found in errors: {errors} - """.format(doc_path=d_path, schema_path=s_path, - code=hex(error.code), info=info, - constraint=constraint, errors=_errors)) + """.format( + doc_path=d_path, + schema_path=s_path, + code=hex(error.code), + info=info, + constraint=constraint, + errors=_errors, + ) + ) return i @@ -133,8 +147,9 @@ def assert_not_has_error(_errors, *args, **kwargs): def assert_bad_type(field, data_type, value): - assert_fail({field: value}, - error=(field, (field, 'type'), errors.BAD_TYPE, data_type)) + assert_fail( + {field: value}, error=(field, (field, 'type'), errors.BAD_TYPE, data_type) + ) def assert_normalized(document, expected, schema=None, validator=None): diff --git a/pipenv/vendor/cerberus/tests/conftest.py b/pipenv/vendor/cerberus/tests/conftest.py index 3b4395ea75..776c97bcc3 100644 --- a/pipenv/vendor/cerberus/tests/conftest.py +++ b/pipenv/vendor/cerberus/tests/conftest.py @@ -23,67 +23,27 @@ def validator(): sample_schema = { - 'a_string': { - 'type': 'string', - 'minlength': 2, - 'maxlength': 10 - }, - 'a_binary': { - 'type': 'binary', - 'minlength': 2, - 'maxlength': 10 - }, - 'a_nullable_integer': { - 'type': 'integer', - 'nullable': True - }, - 'an_integer': { - 'type': 'integer', - 'min': 1, - 'max': 100, - }, - 'a_restricted_integer': { - 'type': 'integer', - 'allowed': [-1, 0, 1], - }, - 'a_boolean': { - 'type': 'boolean', - }, - 'a_datetime': { - 'type': 'datetime', - }, - 'a_float': { - 'type': 'float', - 'min': 1, - 'max': 100, - }, - 'a_number': { - 'type': 'number', - 'min': 1, - 'max': 100, - }, - 'a_set': { - 'type': 'set', - }, - 'one_or_more_strings': { - 'type': ['string', 'list'], - 'schema': {'type': 'string'} - }, + 'a_string': {'type': 'string', 'minlength': 2, 'maxlength': 10}, + 'a_binary': {'type': 'binary', 'minlength': 2, 'maxlength': 10}, + 'a_nullable_integer': {'type': 'integer', 'nullable': True}, + 'an_integer': {'type': 'integer', 'min': 1, 'max': 100}, + 'a_restricted_integer': {'type': 'integer', 'allowed': [-1, 0, 1]}, + 'a_boolean': {'type': 'boolean', 'meta': 'can haz two distinct states'}, + 'a_datetime': {'type': 'datetime', 'meta': {'format': '%a, %d. 
%b %Y'}}, + 'a_float': {'type': 'float', 'min': 1, 'max': 100}, + 'a_number': {'type': 'number', 'min': 1, 'max': 100}, + 'a_set': {'type': 'set'}, + 'one_or_more_strings': {'type': ['string', 'list'], 'schema': {'type': 'string'}}, 'a_regex_email': { 'type': 'string', - 'regex': '^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$' + 'regex': r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$', }, - 'a_readonly_string': { - 'type': 'string', - 'readonly': True, - }, - 'a_restricted_string': { - 'type': 'string', - 'allowed': ["agent", "client", "vendor"], - }, - 'an_array': { + 'a_readonly_string': {'type': 'string', 'readonly': True}, + 'a_restricted_string': {'type': 'string', 'allowed': ['agent', 'client', 'vendor']}, + 'an_array': {'type': 'list', 'allowed': ['agent', 'client', 'vendor']}, + 'an_array_from_set': { 'type': 'list', - 'allowed': ["agent", "client", "vendor"], + 'allowed': set(['agent', 'client', 'vendor']), }, 'a_list_of_dicts': { 'type': 'list', @@ -97,38 +57,25 @@ def validator(): }, 'a_list_of_values': { 'type': 'list', - 'items': [{'type': 'string'}, {'type': 'integer'}, ] - }, - 'a_list_of_integers': { - 'type': 'list', - 'schema': {'type': 'integer'}, + 'items': [{'type': 'string'}, {'type': 'integer'}], }, + 'a_list_of_integers': {'type': 'list', 'schema': {'type': 'integer'}}, 'a_dict': { 'type': 'dict', 'schema': { 'address': {'type': 'string'}, - 'city': {'type': 'string', 'required': True} + 'city': {'type': 'string', 'required': True}, }, }, - 'a_dict_with_valueschema': { - 'type': 'dict', - 'valueschema': {'type': 'integer'} - }, - 'a_dict_with_keyschema': { - 'type': 'dict', - 'keyschema': {'type': 'string', 'regex': '[a-z]+'} - }, + 'a_dict_with_valuesrules': {'type': 'dict', 'valuesrules': {'type': 'integer'}}, 'a_list_length': { 'type': 'list', 'schema': {'type': 'integer'}, 'minlength': 2, 'maxlength': 5, }, - 'a_nullable_field_without_type': { - 'nullable': True - }, - 'a_not_nullable_field_without_type': { - }, + 'a_nullable_field_without_type': {'nullable': True}, + 'a_not_nullable_field_without_type': {}, } sample_document = {'name': 'john doe'} diff --git a/pipenv/vendor/cerberus/tests/test_assorted.py b/pipenv/vendor/cerberus/tests/test_assorted.py index 641adb7e64..b84ef81032 100644 --- a/pipenv/vendor/cerberus/tests/test_assorted.py +++ b/pipenv/vendor/cerberus/tests/test_assorted.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from decimal import Decimal +from pkg_resources import Distribution, DistributionNotFound from pytest import mark @@ -8,6 +9,37 @@ from cerberus.tests import assert_fail, assert_success from cerberus.utils import validator_factory from cerberus.validator import BareValidator +from cerberus.platform import PYTHON_VERSION + + +if PYTHON_VERSION > 3 and PYTHON_VERSION < 3.4: + from imp import reload +elif PYTHON_VERSION >= 3.4: + from importlib import reload +else: + pass # Python 2.x + + +def test_pkgresources_version(monkeypatch): + def create_fake_distribution(name): + return Distribution(project_name="cerberus", version="1.2.3") + + with monkeypatch.context() as m: + cerberus = __import__("cerberus") + m.setattr("pkg_resources.get_distribution", create_fake_distribution) + reload(cerberus) + assert cerberus.__version__ == "1.2.3" + + +def test_version_not_found(monkeypatch): + def raise_distribution_not_found(name): + raise DistributionNotFound("pkg_resources cannot get distribution") + + with monkeypatch.context() as m: + cerberus = __import__("cerberus") + m.setattr("pkg_resources.get_distribution", 
raise_distribution_not_found) + reload(cerberus) + assert cerberus.__version__ == "unknown" def test_clear_cache(validator): @@ -23,8 +55,11 @@ def test_docstring(validator): # Test that testing with the sample schema works as expected # as there might be rules with side-effects in it -@mark.parametrize('test,document', ((assert_fail, {'an_integer': 60}), - (assert_success, {'an_integer': 110}))) + +@mark.parametrize( + "test,document", + ((assert_fail, {"an_integer": 60}), (assert_success, {"an_integer": 110})), +) def test_that_test_fails(test, document): try: test(document) @@ -35,42 +70,42 @@ def test_that_test_fails(test, document): def test_dynamic_types(): - decimal_type = TypeDefinition('decimal', (Decimal,), ()) - document = {'measurement': Decimal(0)} - schema = {'measurement': {'type': 'decimal'}} + decimal_type = TypeDefinition("decimal", (Decimal,), ()) + document = {"measurement": Decimal(0)} + schema = {"measurement": {"type": "decimal"}} validator = Validator() - validator.types_mapping['decimal'] = decimal_type + validator.types_mapping["decimal"] = decimal_type assert_success(document, schema, validator) class MyValidator(Validator): types_mapping = Validator.types_mapping.copy() - types_mapping['decimal'] = decimal_type + types_mapping["decimal"] = decimal_type + validator = MyValidator() assert_success(document, schema, validator) def test_mro(): - assert Validator.__mro__ == (Validator, BareValidator, object), \ - Validator.__mro__ + assert Validator.__mro__ == (Validator, BareValidator, object), Validator.__mro__ def test_mixin_init(): class Mixin(object): def __init__(self, *args, **kwargs): - kwargs['test'] = True + kwargs["test"] = True super(Mixin, self).__init__(*args, **kwargs) - MyValidator = validator_factory('MyValidator', Mixin) + MyValidator = validator_factory("MyValidator", Mixin) validator = MyValidator() - assert validator._config['test'] + assert validator._config["test"] def test_sub_init(): class MyValidator(Validator): def __init__(self, *args, **kwargs): - kwargs['test'] = True + kwargs["test"] = True super(MyValidator, self).__init__(*args, **kwargs) validator = MyValidator() - assert validator._config['test'] + assert validator._config["test"] diff --git a/pipenv/vendor/cerberus/tests/test_customization.py b/pipenv/vendor/cerberus/tests/test_customization.py index 6055894d4b..8bc3f4645e 100644 --- a/pipenv/vendor/cerberus/tests/test_customization.py +++ b/pipenv/vendor/cerberus/tests/test_customization.py @@ -1,12 +1,13 @@ # -*- coding: utf-8 -*- +from pytest import mark + import cerberus from cerberus.tests import assert_fail, assert_success from cerberus.tests.conftest import sample_schema def test_contextual_data_preservation(): - class InheritedValidator(cerberus.Validator): def __init__(self, *args, **kwargs): if 'working_dir' in kwargs: @@ -18,9 +19,9 @@ def _validate_type_test(self, value): return True assert 'test' in InheritedValidator.types - v = InheritedValidator({'test': {'type': 'list', - 'schema': {'type': 'test'}}}, - working_dir='/tmp') + v = InheritedValidator( + {'test': {'type': 'list', 'schema': {'type': 'test'}}}, working_dir='/tmp' + ) assert_success({'test': ['foo']}, validator=v) @@ -42,25 +43,47 @@ def _validate_bar(self, value): assert 'bar' in CustomValidator.validation_rules -def test_issue_265(): +# TODO remove 'validator' as rule parameter with the next major release +@mark.parametrize('rule', ('check_with', 'validator')) +def test_check_with_method(rule): + # https://github.com/pyeve/cerberus/issues/265 + class 
MyValidator(cerberus.Validator): + def _check_with_oddity(self, field, value): + if not value & 1: + self._error(field, "Must be an odd number") + + v = MyValidator(schema={'amount': {rule: 'oddity'}}) + assert_success(document={'amount': 1}, validator=v) + assert_fail( + document={'amount': 2}, + validator=v, + error=('amount', (), cerberus.errors.CUSTOM, None, ('Must be an odd number',)), + ) + + +# TODO remove test with the next major release +@mark.parametrize('rule', ('check_with', 'validator')) +def test_validator_method(rule): class MyValidator(cerberus.Validator): def _validator_oddity(self, field, value): if not value & 1: self._error(field, "Must be an odd number") - v = MyValidator(schema={'amount': {'validator': 'oddity'}}) + v = MyValidator(schema={'amount': {rule: 'oddity'}}) assert_success(document={'amount': 1}, validator=v) - assert_fail(document={'amount': 2}, validator=v, - error=('amount', (), cerberus.errors.CUSTOM, None, - ('Must be an odd number',))) + assert_fail( + document={'amount': 2}, + validator=v, + error=('amount', (), cerberus.errors.CUSTOM, None, ('Must be an odd number',)), + ) def test_schema_validation_can_be_disabled_in_schema_setter(): - class NonvalidatingValidator(cerberus.Validator): """ Skips schema validation to speed up initialization """ + @cerberus.Validator.schema.setter def schema(self, schema): if schema is None: diff --git a/pipenv/vendor/cerberus/tests/test_errors.py b/pipenv/vendor/cerberus/tests/test_errors.py index df33964f9b..e4d9b37a14 100644 --- a/pipenv/vendor/cerberus/tests/test_errors.py +++ b/pipenv/vendor/cerberus/tests/test_errors.py @@ -24,14 +24,14 @@ def test__error_1(): def test__error_2(): - v = Validator(schema={'foo': {'keyschema': {'type': 'integer'}}}) + v = Validator(schema={'foo': {'keysrules': {'type': 'integer'}}}) v.document = {'foo': {'0': 'bar'}} - v._error('foo', errors.KEYSCHEMA, ()) + v._error('foo', errors.KEYSRULES, ()) error = v._errors[0] assert error.document_path == ('foo',) - assert error.schema_path == ('foo', 'keyschema') + assert error.schema_path == ('foo', 'keysrules') assert error.code == 0x83 - assert error.rule == 'keyschema' + assert error.rule == 'keysrules' assert error.constraint == {'type': 'integer'} assert error.value == {'0': 'bar'} assert error.info == ((),) @@ -40,8 +40,10 @@ def test__error_2(): def test__error_3(): - valids = [{'type': 'string', 'regex': '0x[0-9a-f]{2}'}, - {'type': 'integer', 'min': 0, 'max': 255}] + valids = [ + {'type': 'string', 'regex': '0x[0-9a-f]{2}'}, + {'type': 'integer', 'min': 0, 'max': 255}, + ] v = Validator(schema={'foo': {'oneof': valids}}) v.document = {'foo': '0x100'} v._error('foo', errors.ONEOF, (), 0, 2) @@ -77,8 +79,9 @@ def test_error_tree_from_subschema(validator): assert 'bar' in s_error_tree['foo']['schema'] assert 'type' in s_error_tree['foo']['schema']['bar'] assert s_error_tree['foo']['schema']['bar']['type'].errors[0].value == 0 - assert s_error_tree.fetch_errors_from( - ('foo', 'schema', 'bar', 'type'))[0].value == 0 + assert ( + s_error_tree.fetch_errors_from(('foo', 'schema', 'bar', 'type'))[0].value == 0 + ) def test_error_tree_from_anyof(validator): @@ -98,12 +101,17 @@ def test_error_tree_from_anyof(validator): def test_nested_error_paths(validator): - schema = {'a_dict': {'keyschema': {'type': 'integer'}, - 'valueschema': {'regex': '[a-z]*'}}, - 'a_list': {'schema': {'type': 'string', - 'oneof_regex': ['[a-z]*$', '[A-Z]*']}}} - document = {'a_dict': {0: 'abc', 'one': 'abc', 2: 'aBc', 'three': 'abC'}, - 'a_list': [0, 'abc', 
'abC']} + schema = { + 'a_dict': { + 'keysrules': {'type': 'integer'}, + 'valuesrules': {'regex': '[a-z]*'}, + }, + 'a_list': {'schema': {'type': 'string', 'oneof_regex': ['[a-z]*$', '[A-Z]*']}}, + } + document = { + 'a_dict': {0: 'abc', 'one': 'abc', 2: 'aBc', 'three': 'abC'}, + 'a_list': [0, 'abc', 'abC'], + } assert_fail(document, schema, validator=validator) _det = validator.document_error_tree @@ -120,35 +128,59 @@ def test_nested_error_paths(validator): assert len(_det['a_dict'][2].errors) == 1 assert len(_det['a_dict']['three'].errors) == 2 - assert len(_set['a_dict']['keyschema'].errors) == 1 - assert len(_set['a_dict']['valueschema'].errors) == 1 + assert len(_set['a_dict']['keysrules'].errors) == 1 + assert len(_set['a_dict']['valuesrules'].errors) == 1 - assert len(_set['a_dict']['keyschema']['type'].errors) == 2 - assert len(_set['a_dict']['valueschema']['regex'].errors) == 2 + assert len(_set['a_dict']['keysrules']['type'].errors) == 2 + assert len(_set['a_dict']['valuesrules']['regex'].errors) == 2 _ref_err = ValidationError( - ('a_dict', 'one'), ('a_dict', 'keyschema', 'type'), - errors.BAD_TYPE.code, 'type', 'integer', 'one', ()) + ('a_dict', 'one'), + ('a_dict', 'keysrules', 'type'), + errors.BAD_TYPE.code, + 'type', + 'integer', + 'one', + (), + ) assert _det['a_dict']['one'].errors[0] == _ref_err - assert _set['a_dict']['keyschema']['type'].errors[0] == _ref_err + assert _set['a_dict']['keysrules']['type'].errors[0] == _ref_err _ref_err = ValidationError( - ('a_dict', 2), ('a_dict', 'valueschema', 'regex'), - errors.REGEX_MISMATCH.code, 'regex', '[a-z]*$', 'aBc', ()) + ('a_dict', 2), + ('a_dict', 'valuesrules', 'regex'), + errors.REGEX_MISMATCH.code, + 'regex', + '[a-z]*$', + 'aBc', + (), + ) assert _det['a_dict'][2].errors[0] == _ref_err - assert _set['a_dict']['valueschema']['regex'].errors[0] == _ref_err + assert _set['a_dict']['valuesrules']['regex'].errors[0] == _ref_err _ref_err = ValidationError( - ('a_dict', 'three'), ('a_dict', 'keyschema', 'type'), - errors.BAD_TYPE.code, 'type', 'integer', 'three', ()) + ('a_dict', 'three'), + ('a_dict', 'keysrules', 'type'), + errors.BAD_TYPE.code, + 'type', + 'integer', + 'three', + (), + ) assert _det['a_dict']['three'].errors[0] == _ref_err - assert _set['a_dict']['keyschema']['type'].errors[1] == _ref_err + assert _set['a_dict']['keysrules']['type'].errors[1] == _ref_err _ref_err = ValidationError( - ('a_dict', 'three'), ('a_dict', 'valueschema', 'regex'), - errors.REGEX_MISMATCH.code, 'regex', '[a-z]*$', 'abC', ()) + ('a_dict', 'three'), + ('a_dict', 'valuesrules', 'regex'), + errors.REGEX_MISMATCH.code, + 'regex', + '[a-z]*$', + 'abC', + (), + ) assert _det['a_dict']['three'].errors[1] == _ref_err - assert _set['a_dict']['valueschema']['regex'].errors[1] == _ref_err + assert _set['a_dict']['valuesrules']['regex'].errors[1] == _ref_err assert len(_det['a_list'].errors) == 1 assert len(_det['a_list'][0].errors) == 1 @@ -161,34 +193,56 @@ def test_nested_error_paths(validator): assert len(_set['a_list']['schema']['oneof'][1]['regex'].errors) == 1 _ref_err = ValidationError( - ('a_list', 0), ('a_list', 'schema', 'type'), errors.BAD_TYPE.code, - 'type', 'string', 0, ()) + ('a_list', 0), + ('a_list', 'schema', 'type'), + errors.BAD_TYPE.code, + 'type', + 'string', + 0, + (), + ) assert _det['a_list'][0].errors[0] == _ref_err assert _set['a_list']['schema']['type'].errors[0] == _ref_err _ref_err = ValidationError( - ('a_list', 2), ('a_list', 'schema', 'oneof'), errors.ONEOF.code, - 'oneof', 'irrelevant_at_this_point', 'abC', 
()) + ('a_list', 2), + ('a_list', 'schema', 'oneof'), + errors.ONEOF.code, + 'oneof', + 'irrelevant_at_this_point', + 'abC', + (), + ) assert _det['a_list'][2].errors[0] == _ref_err assert _set['a_list']['schema']['oneof'].errors[0] == _ref_err _ref_err = ValidationError( - ('a_list', 2), ('a_list', 'schema', 'oneof', 0, 'regex'), - errors.REGEX_MISMATCH.code, 'regex', '[a-z]*$', 'abC', ()) + ('a_list', 2), + ('a_list', 'schema', 'oneof', 0, 'regex'), + errors.REGEX_MISMATCH.code, + 'regex', + '[a-z]*$', + 'abC', + (), + ) assert _det['a_list'][2].errors[1] == _ref_err assert _set['a_list']['schema']['oneof'][0]['regex'].errors[0] == _ref_err _ref_err = ValidationError( - ('a_list', 2), ('a_list', 'schema', 'oneof', 1, 'regex'), - errors.REGEX_MISMATCH.code, 'regex', '[a-z]*$', 'abC', ()) + ('a_list', 2), + ('a_list', 'schema', 'oneof', 1, 'regex'), + errors.REGEX_MISMATCH.code, + 'regex', + '[a-z]*$', + 'abC', + (), + ) assert _det['a_list'][2].errors[2] == _ref_err assert _set['a_list']['schema']['oneof'][1]['regex'].errors[0] == _ref_err def test_queries(): - schema = {'foo': {'type': 'dict', - 'schema': - {'bar': {'type': 'number'}}}} + schema = {'foo': {'type': 'dict', 'schema': {'bar': {'type': 'number'}}}} document = {'foo': {'bar': 'zero'}} validator = Validator(schema) validator(document) @@ -202,59 +256,68 @@ def test_queries(): assert errors.MAPPING_SCHEMA in validator.document_error_tree['foo'] assert errors.BAD_TYPE in validator.document_error_tree['foo']['bar'] assert errors.MAPPING_SCHEMA in validator.schema_error_tree['foo']['schema'] - assert errors.BAD_TYPE in \ - validator.schema_error_tree['foo']['schema']['bar']['type'] + assert ( + errors.BAD_TYPE in validator.schema_error_tree['foo']['schema']['bar']['type'] + ) - assert (validator.document_error_tree['foo'][errors.MAPPING_SCHEMA] - .child_errors[0].code == errors.BAD_TYPE.code) + assert ( + validator.document_error_tree['foo'][errors.MAPPING_SCHEMA].child_errors[0].code + == errors.BAD_TYPE.code + ) def test_basic_error_handler(): handler = errors.BasicErrorHandler() _errors, ref = [], {} - _errors.append(ValidationError( - ['foo'], ['foo'], 0x63, 'readonly', True, None, ())) + _errors.append(ValidationError(['foo'], ['foo'], 0x63, 'readonly', True, None, ())) ref.update({'foo': [handler.messages[0x63]]}) assert handler(_errors) == ref - _errors.append(ValidationError( - ['bar'], ['foo'], 0x42, 'min', 1, 2, ())) + _errors.append(ValidationError(['bar'], ['foo'], 0x42, 'min', 1, 2, ())) ref.update({'bar': [handler.messages[0x42].format(constraint=1)]}) assert handler(_errors) == ref - _errors.append(ValidationError( - ['zap', 'foo'], ['zap', 'schema', 'foo'], 0x24, 'type', 'string', - True, ())) - ref.update({'zap': [{'foo': [handler.messages[0x24].format( - constraint='string')]}]}) + _errors.append( + ValidationError( + ['zap', 'foo'], ['zap', 'schema', 'foo'], 0x24, 'type', 'string', True, () + ) + ) + ref.update({'zap': [{'foo': [handler.messages[0x24].format(constraint='string')]}]}) assert handler(_errors) == ref - _errors.append(ValidationError( - ['zap', 'foo'], ['zap', 'schema', 'foo'], 0x41, 'regex', - '^p[äe]ng$', 'boom', ())) - ref['zap'][0]['foo'].append( - handler.messages[0x41].format(constraint='^p[äe]ng$')) + _errors.append( + ValidationError( + ['zap', 'foo'], + ['zap', 'schema', 'foo'], + 0x41, + 'regex', + '^p[äe]ng$', + 'boom', + (), + ) + ) + ref['zap'][0]['foo'].append(handler.messages[0x41].format(constraint='^p[äe]ng$')) assert handler(_errors) == ref def 
test_basic_error_of_errors(validator): - schema = {'foo': {'oneof': [ - {'type': 'integer'}, - {'type': 'string'} - ]}} + schema = {'foo': {'oneof': [{'type': 'integer'}, {'type': 'string'}]}} document = {'foo': 23.42} - error = ('foo', ('foo', 'oneof'), errors.ONEOF, - schema['foo']['oneof'], ()) + error = ('foo', ('foo', 'oneof'), errors.ONEOF, schema['foo']['oneof'], ()) child_errors = [ (error[0], error[1] + (0, 'type'), errors.BAD_TYPE, 'integer'), - (error[0], error[1] + (1, 'type'), errors.BAD_TYPE, 'string') + (error[0], error[1] + (1, 'type'), errors.BAD_TYPE, 'string'), ] - assert_fail(document, schema, validator=validator, - error=error, child_errors=child_errors) + assert_fail( + document, schema, validator=validator, error=error, child_errors=child_errors + ) assert validator.errors == { - 'foo': [errors.BasicErrorHandler.messages[0x92], - {'oneof definition 0': ['must be of integer type'], - 'oneof definition 1': ['must be of string type']} - ] + 'foo': [ + errors.BasicErrorHandler.messages[0x92], + { + 'oneof definition 0': ['must be of integer type'], + 'oneof definition 1': ['must be of string type'], + }, + ] } diff --git a/pipenv/vendor/cerberus/tests/test_normalization.py b/pipenv/vendor/cerberus/tests/test_normalization.py index 6e06f553b8..adc281ef71 100644 --- a/pipenv/vendor/cerberus/tests/test_normalization.py +++ b/pipenv/vendor/cerberus/tests/test_normalization.py @@ -1,10 +1,21 @@ # -*- coding: utf-8 -*- +from copy import deepcopy from tempfile import NamedTemporaryFile +from pytest import mark + from cerberus import Validator, errors -from cerberus.tests import (assert_fail, assert_has_error, assert_normalized, - assert_success) +from cerberus.tests import ( + assert_fail, + assert_has_error, + assert_normalized, + assert_success, +) + + +def must_not_be_called(*args, **kwargs): + raise RuntimeError('This shall not be called.') def test_coerce(): @@ -15,21 +26,31 @@ def test_coerce(): def test_coerce_in_dictschema(): - schema = {'thing': {'type': 'dict', - 'schema': {'amount': {'coerce': int}}}} + schema = {'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}} document = {'thing': {'amount': '2'}} expected = {'thing': {'amount': 2}} assert_normalized(document, expected, schema) def test_coerce_in_listschema(): - schema = {'things': {'type': 'list', - 'schema': {'coerce': int}}} + schema = {'things': {'type': 'list', 'schema': {'coerce': int}}} document = {'things': ['1', '2', '3']} expected = {'things': [1, 2, 3]} assert_normalized(document, expected, schema) +def test_coerce_in_listitems(): + schema = {'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str}]}} + document = {'things': ['1', 2]} + expected = {'things': [1, '2']} + assert_normalized(document, expected, schema) + + validator = Validator(schema) + document['things'].append(3) + assert not validator(document) + assert validator.document['things'] == document['things'] + + def test_coerce_in_dictschema_in_listschema(): item_schema = {'type': 'dict', 'schema': {'amount': {'coerce': int}}} schema = {'things': {'type': 'list', 'schema': item_schema}} @@ -39,9 +60,7 @@ def test_coerce_in_dictschema_in_listschema(): def test_coerce_not_destructive(): - schema = { - 'amount': {'coerce': int} - } + schema = {'amount': {'coerce': int}} v = Validator(schema) doc = {'amount': '1'} v.validate(doc) @@ -52,16 +71,48 @@ def test_coerce_catches_ValueError(): schema = {'amount': {'coerce': int}} _errors = assert_fail({'amount': 'not_a_number'}, schema) _errors[0].info = () # ignore 
exception message here - assert_has_error(_errors, 'amount', ('amount', 'coerce'), - errors.COERCION_FAILED, int) + assert_has_error( + _errors, 'amount', ('amount', 'coerce'), errors.COERCION_FAILED, int + ) + + +def test_coerce_in_listitems_catches_ValueError(): + schema = {'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str}]}} + document = {'things': ['not_a_number', 2]} + _errors = assert_fail(document, schema) + _errors[0].info = () # ignore exception message here + assert_has_error( + _errors, + ('things', 0), + ('things', 'items', 'coerce'), + errors.COERCION_FAILED, + int, + ) def test_coerce_catches_TypeError(): schema = {'name': {'coerce': str.lower}} _errors = assert_fail({'name': 1234}, schema) _errors[0].info = () # ignore exception message here - assert_has_error(_errors, 'name', ('name', 'coerce'), - errors.COERCION_FAILED, str.lower) + assert_has_error( + _errors, 'name', ('name', 'coerce'), errors.COERCION_FAILED, str.lower + ) + + +def test_coerce_in_listitems_catches_TypeError(): + schema = { + 'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str.lower}]} + } + document = {'things': ['1', 2]} + _errors = assert_fail(document, schema) + _errors[0].info = () # ignore exception message here + assert_has_error( + _errors, + ('things', 1), + ('things', 'items', 'coerce'), + errors.COERCION_FAILED, + str.lower, + ) def test_coerce_unknown(): @@ -88,16 +139,16 @@ def _normalize_coerce_multiply(self, value): def test_coerce_chain(): - drop_prefix = lambda x: x[2:] - upper = lambda x: x.upper() + drop_prefix = lambda x: x[2:] # noqa: E731 + upper = lambda x: x.upper() # noqa: E731 schema = {'foo': {'coerce': [hex, drop_prefix, upper]}} assert_normalized({'foo': 15}, {'foo': 'F'}, schema) def test_coerce_chain_aborts(validator): def dont_do_me(value): - raise AssertionError('The coercion chain did not abort after an ' - 'error.') + raise AssertionError('The coercion chain did not abort after an ' 'error.') + schema = {'foo': {'coerce': [hex, dont_do_me]}} validator({'foo': '0'}, schema) assert errors.COERCION_FAILED in validator._errors @@ -105,12 +156,12 @@ def dont_do_me(value): def test_coerce_non_digit_in_sequence(validator): # https://github.com/pyeve/cerberus/issues/211 - schema = {'data': {'type': 'list', - 'schema': {'type': 'integer', 'coerce': int}}} + schema = {'data': {'type': 'list', 'schema': {'type': 'integer', 'coerce': int}}} document = {'data': ['q']} assert validator.validated(document, schema) is None - assert (validator.validated(document, schema, always_return_document=True) - == document) # noqa: W503 + assert ( + validator.validated(document, schema, always_return_document=True) == document + ) # noqa: W503 def test_nullables_dont_fail_coerce(): @@ -119,6 +170,18 @@ def test_nullables_dont_fail_coerce(): assert_normalized(document, document, schema) +def test_nullables_fail_coerce_on_non_null_values(validator): + def failing_coercion(value): + raise Exception("expected to fail") + + schema = {'foo': {'coerce': failing_coercion, 'nullable': True, 'type': 'integer'}} + document = {'foo': None} + assert_normalized(document, document, schema) + + validator({'foo': 2}, schema) + assert errors.COERCION_FAILED in validator._errors + + def test_normalized(): schema = {'amount': {'coerce': int}} document = {'amount': '2'} @@ -154,9 +217,13 @@ def test_purge_unknown(): def test_purge_unknown_in_subschema(): - schema = {'foo': {'type': 'dict', - 'schema': {'foo': {'type': 'string'}}, - 'purge_unknown': True}} + schema = { + 'foo': { + 
'type': 'dict', + 'schema': {'foo': {'type': 'string'}}, + 'purge_unknown': True, + } + } document = {'foo': {'bar': ''}} expected = {'foo': {}} assert_normalized(document, expected, schema) @@ -175,8 +242,7 @@ def test_issue_147_complex(): def test_issue_147_nested_dict(): - schema = {'thing': {'type': 'dict', - 'schema': {'amount': {'coerce': int}}}} + schema = {'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}} ref_obj = '2' document = {'thing': {'amount': ref_obj}} normalized = Validator(schema).normalized(document) @@ -186,20 +252,21 @@ def test_issue_147_nested_dict(): assert document['thing']['amount'] is ref_obj -def test_coerce_in_valueschema(): +def test_coerce_in_valuesrules(): # https://github.com/pyeve/cerberus/issues/155 - schema = {'thing': {'type': 'dict', - 'valueschema': {'coerce': int, - 'type': 'integer'}}} + schema = { + 'thing': {'type': 'dict', 'valuesrules': {'coerce': int, 'type': 'integer'}} + } document = {'thing': {'amount': '2'}} expected = {'thing': {'amount': 2}} assert_normalized(document, expected, schema) -def test_coerce_in_keyschema(): +def test_coerce_in_keysrules(): # https://github.com/pyeve/cerberus/issues/155 - schema = {'thing': {'type': 'dict', - 'keyschema': {'coerce': int, 'type': 'integer'}}} + schema = { + 'thing': {'type': 'dict', 'keysrules': {'coerce': int, 'type': 'integer'}} + } document = {'thing': {'5': 'foo'}} expected = {'thing': {5: 'foo'}} assert_normalized(document, expected, schema) @@ -207,8 +274,7 @@ def test_coerce_in_keyschema(): def test_coercion_of_sequence_items(validator): # https://github.com/pyeve/cerberus/issues/161 - schema = {'a_list': {'type': 'list', 'schema': {'type': 'float', - 'coerce': float}}} + schema = {'a_list': {'type': 'list', 'schema': {'type': 'float', 'coerce': float}}} document = {'a_list': [3, 4, 5]} expected = {'a_list': [3.0, 4.0, 5.0]} assert_normalized(document, expected, schema, validator) @@ -216,110 +282,76 @@ def test_coercion_of_sequence_items(validator): assert isinstance(x, float) -def test_default_missing(): - _test_default_missing({'default': 'bar_value'}) - - -def test_default_setter_missing(): - _test_default_missing({'default_setter': lambda doc: 'bar_value'}) - - -def _test_default_missing(default): +@mark.parametrize( + 'default', ({'default': 'bar_value'}, {'default_setter': lambda doc: 'bar_value'}) +) +def test_default_missing(default): bar_schema = {'type': 'string'} bar_schema.update(default) - schema = {'foo': {'type': 'string'}, - 'bar': bar_schema} + schema = {'foo': {'type': 'string'}, 'bar': bar_schema} document = {'foo': 'foo_value'} expected = {'foo': 'foo_value', 'bar': 'bar_value'} assert_normalized(document, expected, schema) -def test_default_existent(): - _test_default_existent({'default': 'bar_value'}) - - -def test_default_setter_existent(): - def raise_error(doc): - raise RuntimeError('should not be called') - _test_default_existent({'default_setter': raise_error}) - - -def _test_default_existent(default): +@mark.parametrize( + 'default', ({'default': 'bar_value'}, {'default_setter': must_not_be_called}) +) +def test_default_existent(default): bar_schema = {'type': 'string'} bar_schema.update(default) - schema = {'foo': {'type': 'string'}, - 'bar': bar_schema} + schema = {'foo': {'type': 'string'}, 'bar': bar_schema} document = {'foo': 'foo_value', 'bar': 'non_default'} assert_normalized(document, document.copy(), schema) -def test_default_none_nullable(): - _test_default_none_nullable({'default': 'bar_value'}) - - -def 
test_default_setter_none_nullable(): - def raise_error(doc): - raise RuntimeError('should not be called') - _test_default_none_nullable({'default_setter': raise_error}) - - -def _test_default_none_nullable(default): - bar_schema = {'type': 'string', - 'nullable': True} +@mark.parametrize( + 'default', ({'default': 'bar_value'}, {'default_setter': must_not_be_called}) +) +def test_default_none_nullable(default): + bar_schema = {'type': 'string', 'nullable': True} bar_schema.update(default) - schema = {'foo': {'type': 'string'}, - 'bar': bar_schema} + schema = {'foo': {'type': 'string'}, 'bar': bar_schema} document = {'foo': 'foo_value', 'bar': None} assert_normalized(document, document.copy(), schema) -def test_default_none_nonnullable(): - _test_default_none_nullable({'default': 'bar_value'}) - - -def test_default_setter_none_nonnullable(): - _test_default_none_nullable( - {'default_setter': lambda doc: 'bar_value'}) - - -def _test_default_none_nonnullable(default): - bar_schema = {'type': 'string', - 'nullable': False} +@mark.parametrize( + 'default', ({'default': 'bar_value'}, {'default_setter': lambda doc: 'bar_value'}) +) +def test_default_none_nonnullable(default): + bar_schema = {'type': 'string', 'nullable': False} bar_schema.update(default) - schema = {'foo': {'type': 'string'}, - 'bar': bar_schema} - document = {'foo': 'foo_value', 'bar': 'bar_value'} - assert_normalized(document, document.copy(), schema) + schema = {'foo': {'type': 'string'}, 'bar': bar_schema} + document = {'foo': 'foo_value', 'bar': None} + expected = {'foo': 'foo_value', 'bar': 'bar_value'} + assert_normalized(document, expected, schema) def test_default_none_default_value(): - schema = {'foo': {'type': 'string'}, - 'bar': {'type': 'string', - 'nullable': True, - 'default': None}} + schema = { + 'foo': {'type': 'string'}, + 'bar': {'type': 'string', 'nullable': True, 'default': None}, + } document = {'foo': 'foo_value'} expected = {'foo': 'foo_value', 'bar': None} assert_normalized(document, expected, schema) -def test_default_missing_in_subschema(): - _test_default_missing_in_subschema({'default': 'bar_value'}) - - -def test_default_setter_missing_in_subschema(): - _test_default_missing_in_subschema( - {'default_setter': lambda doc: 'bar_value'}) - - -def _test_default_missing_in_subschema(default): +@mark.parametrize( + 'default', ({'default': 'bar_value'}, {'default_setter': lambda doc: 'bar_value'}) +) +def test_default_missing_in_subschema(default): bar_schema = {'type': 'string'} bar_schema.update(default) - schema = {'thing': {'type': 'dict', - 'schema': {'foo': {'type': 'string'}, - 'bar': bar_schema}}} + schema = { + 'thing': { + 'type': 'dict', + 'schema': {'foo': {'type': 'string'}, 'bar': bar_schema}, + } + } document = {'thing': {'foo': 'foo_value'}} - expected = {'thing': {'foo': 'foo_value', - 'bar': 'bar_value'}} + expected = {'thing': {'foo': 'foo_value', 'bar': 'bar_value'}} assert_normalized(document, expected, schema) @@ -328,8 +360,7 @@ def test_depending_default_setters(): 'a': {'type': 'integer'}, 'b': {'type': 'integer', 'default_setter': lambda d: d['a'] + 1}, 'c': {'type': 'integer', 'default_setter': lambda d: d['b'] * 2}, - 'd': {'type': 'integer', - 'default_setter': lambda d: d['b'] + d['c']} + 'd': {'type': 'integer', 'default_setter': lambda d: d['b'] + d['c']}, } document = {'a': 1} expected = {'a': 1, 'b': 2, 'c': 4, 'd': 6} @@ -339,7 +370,7 @@ def test_depending_default_setters(): def test_circular_depending_default_setters(validator): schema = { 'a': {'type': 'integer', 
'default_setter': lambda d: d['b'] + 1}, - 'b': {'type': 'integer', 'default_setter': lambda d: d['a'] + 1} + 'b': {'type': 'integer', 'default_setter': lambda d: d['a'] + 1}, } validator({}, schema) assert errors.SETTING_DEFAULT_FAILED in validator._errors @@ -353,14 +384,16 @@ def test_issue_250(): 'schema': { 'type': 'dict', 'allow_unknown': True, - 'schema': {'a': {'type': 'string'}} - } + 'schema': {'a': {'type': 'string'}}, + }, } } document = {'list': {'is_a': 'mapping'}} - assert_fail(document, schema, - error=('list', ('list', 'type'), errors.BAD_TYPE, - schema['list']['type'])) + assert_fail( + document, + schema, + error=('list', ('list', 'type'), errors.BAD_TYPE, schema['list']['type']), + ) def test_issue_250_no_type_pass_on_list(): @@ -370,7 +403,7 @@ def test_issue_250_no_type_pass_on_list(): 'schema': { 'allow_unknown': True, 'type': 'dict', - 'schema': {'a': {'type': 'string'}} + 'schema': {'a': {'type': 'string'}}, } } } @@ -381,28 +414,25 @@ def test_issue_250_no_type_pass_on_list(): def test_issue_250_no_type_fail_on_dict(): # https://github.com/pyeve/cerberus/issues/250 schema = { - 'list': { - 'schema': { - 'allow_unknown': True, - 'schema': {'a': {'type': 'string'}} - } - } + 'list': {'schema': {'allow_unknown': True, 'schema': {'a': {'type': 'string'}}}} } document = {'list': {'a': {'a': 'known'}}} - assert_fail(document, schema, - error=('list', ('list', 'schema'), errors.BAD_TYPE_FOR_SCHEMA, - schema['list']['schema'])) + assert_fail( + document, + schema, + error=( + 'list', + ('list', 'schema'), + errors.BAD_TYPE_FOR_SCHEMA, + schema['list']['schema'], + ), + ) def test_issue_250_no_type_fail_pass_on_other(): # https://github.com/pyeve/cerberus/issues/250 schema = { - 'list': { - 'schema': { - 'allow_unknown': True, - 'schema': {'a': {'type': 'string'}} - } - } + 'list': {'schema': {'allow_unknown': True, 'schema': {'a': {'type': 'string'}}}} } document = {'list': 1} assert_normalized(document, document, schema) @@ -416,21 +446,20 @@ def test_allow_unknown_with_of_rules(): { 'type': 'dict', 'allow_unknown': True, - 'schema': {'known': {'type': 'string'}} - }, - { - 'type': 'dict', - 'schema': {'known': {'type': 'string'}} + 'schema': {'known': {'type': 'string'}}, }, + {'type': 'dict', 'schema': {'known': {'type': 'string'}}}, ] } } # check regression and that allow unknown does not cause any different # than expected behaviour for one-of. 
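    # the document below matches both 'oneof' alternatives (the first is a
    # superset of the second), so exactly-one-of cannot be satisfied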
document = {'test': {'known': 's'}} - assert_fail(document, schema, - error=('test', ('test', 'oneof'), - errors.ONEOF, schema['test']['oneof'])) + assert_fail( + document, + schema, + error=('test', ('test', 'oneof'), errors.ONEOF, schema['test']['oneof']), + ) # check that allow_unknown is actually applied document = {'test': {'known': 's', 'unknown': 'asd'}} assert_success(document, schema) @@ -439,18 +468,20 @@ def test_allow_unknown_with_of_rules(): def test_271_normalising_tuples(): # https://github.com/pyeve/cerberus/issues/271 schema = { - 'my_field': { - 'type': 'list', - 'schema': {'type': ('string', 'number', 'dict')} - } + 'my_field': {'type': 'list', 'schema': {'type': ('string', 'number', 'dict')}} } - document = {'my_field': ('foo', 'bar', 42, 'albert', - 'kandinsky', {'items': 23})} + document = {'my_field': ('foo', 'bar', 42, 'albert', 'kandinsky', {'items': 23})} assert_success(document, schema) normalized = Validator(schema).normalized(document) - assert normalized['my_field'] == ('foo', 'bar', 42, 'albert', - 'kandinsky', {'items': 23}) + assert normalized['my_field'] == ( + 'foo', + 'bar', + 42, + 'albert', + 'kandinsky', + {'items': 23}, + ) def test_allow_unknown_wo_schema(): @@ -472,14 +503,41 @@ def test_allow_unknown_with_purge_unknown_subdocument(): schema = { 'foo': { 'type': 'dict', - 'schema': { - 'bar': { - 'type': 'string' - } - }, - 'allow_unknown': True + 'schema': {'bar': {'type': 'string'}}, + 'allow_unknown': True, } } document = {'foo': {'bar': 'baz', 'corge': False}, 'thud': 'xyzzy'} expected = {'foo': {'bar': 'baz', 'corge': False}} assert_normalized(document, expected, schema, validator) + + +def test_purge_readonly(): + schema = { + 'description': {'type': 'string', 'maxlength': 500}, + 'last_updated': {'readonly': True}, + } + validator = Validator(schema=schema, purge_readonly=True) + document = {'description': 'it is a thing'} + expected = deepcopy(document) + document['last_updated'] = 'future' + assert_normalized(document, expected, validator=validator) + + +def test_defaults_in_allow_unknown_schema(): + schema = {'meta': {'type': 'dict'}, 'version': {'type': 'string'}} + allow_unknown = { + 'type': 'dict', + 'schema': { + 'cfg_path': {'type': 'string', 'default': 'cfg.yaml'}, + 'package': {'type': 'string'}, + }, + } + validator = Validator(schema=schema, allow_unknown=allow_unknown) + + document = {'version': '1.2.3', 'plugin_foo': {'package': 'foo'}} + expected = { + 'version': '1.2.3', + 'plugin_foo': {'package': 'foo', 'cfg_path': 'cfg.yaml'}, + } + assert_normalized(document, expected, schema, validator) diff --git a/pipenv/vendor/cerberus/tests/test_registries.py b/pipenv/vendor/cerberus/tests/test_registries.py index 05f01c52c1..b628952d76 100644 --- a/pipenv/vendor/cerberus/tests/test_registries.py +++ b/pipenv/vendor/cerberus/tests/test_registries.py @@ -1,14 +1,17 @@ # -*- coding: utf-8 -*- from cerberus import schema_registry, rules_set_registry, Validator -from cerberus.tests import (assert_fail, assert_normalized, - assert_schema_error, assert_success) +from cerberus.tests import ( + assert_fail, + assert_normalized, + assert_schema_error, + assert_success, +) def test_schema_registry_simple(): schema_registry.add('foo', {'bar': {'type': 'string'}}) - schema = {'a': {'schema': 'foo'}, - 'b': {'schema': 'foo'}} + schema = {'a': {'schema': 'foo'}, 'b': {'schema': 'foo'}} document = {'a': {'bar': 'a'}, 'b': {'bar': 'b'}} assert_success(document, schema) @@ -33,23 +36,22 @@ def test_allow_unknown_as_reference(): def test_recursion(): 
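    # the 'self' rules set references itself via allow_unknown; resolving
    # it must terminate rather than recurse indefinitely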
- rules_set_registry.add('self', - {'type': 'dict', 'allow_unknown': 'self'}) + rules_set_registry.add('self', {'type': 'dict', 'allow_unknown': 'self'}) v = Validator(allow_unknown='self') assert_success({0: {1: {2: {}}}}, {}, v) def test_references_remain_unresolved(validator): - rules_set_registry.extend((('boolean', {'type': 'boolean'}), - ('booleans', {'valueschema': 'boolean'}))) + rules_set_registry.extend( + (('boolean', {'type': 'boolean'}), ('booleans', {'valuesrules': 'boolean'})) + ) validator.schema = {'foo': 'booleans'} assert 'booleans' == validator.schema['foo'] - assert 'boolean' == rules_set_registry._storage['booleans']['valueschema'] + assert 'boolean' == rules_set_registry._storage['booleans']['valuesrules'] def test_rules_registry_with_anyof_type(): - rules_set_registry.add('string_or_integer', - {'anyof_type': ['string', 'integer']}) + rules_set_registry.add('string_or_integer', {'anyof_type': ['string', 'integer']}) schema = {'soi': 'string_or_integer'} assert_success({'soi': 'hello'}, schema) diff --git a/pipenv/vendor/cerberus/tests/test_schema.py b/pipenv/vendor/cerberus/tests/test_schema.py index 1776cae3a1..84e5094600 100644 --- a/pipenv/vendor/cerberus/tests/test_schema.py +++ b/pipenv/vendor/cerberus/tests/test_schema.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +import re + import pytest from cerberus import Validator, errors, SchemaError @@ -9,14 +11,14 @@ def test_empty_schema(): validator = Validator() - with pytest.raises(SchemaError, message=errors.SCHEMA_ERROR_MISSING): + with pytest.raises(SchemaError, match=errors.SCHEMA_ERROR_MISSING): validator({}, schema=None) def test_bad_schema_type(validator): schema = "this string should really be dict" - exp_msg = errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema) - with pytest.raises(SchemaError, message=exp_msg): + msg = errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema) + with pytest.raises(SchemaError, match=msg): validator.schema = schema @@ -28,23 +30,21 @@ def test_bad_schema_type_field(validator): def test_unknown_rule(validator): - message = "{'foo': [{'unknown': ['unknown rule']}]}" - with pytest.raises(SchemaError, message=message): + msg = "{'foo': [{'unknown': ['unknown rule']}]}" + with pytest.raises(SchemaError, match=re.escape(msg)): validator.schema = {'foo': {'unknown': 'rule'}} def test_unknown_type(validator): - field = 'name' - value = 'catch_me' - message = str({field: [{'type': ['unallowed value %s' % value]}]}) - with pytest.raises(SchemaError, message=message): - validator.schema = {'foo': {'unknown': 'rule'}} + msg = str({'foo': [{'type': ['Unsupported types: unknown']}]}) + with pytest.raises(SchemaError, match=re.escape(msg)): + validator.schema = {'foo': {'type': 'unknown'}} def test_bad_schema_definition(validator): field = 'name' - message = str({field: ['must be of dict type']}) - with pytest.raises(SchemaError, message=message): + msg = str({field: ['must be of dict type']}) + with pytest.raises(SchemaError, match=re.escape(msg)): validator.schema = {field: 'this should really be a dict'} @@ -61,14 +61,14 @@ def test_normalization_rules_are_invalid_in_of_rules(): def test_anyof_allof_schema_validate(): # make sure schema with 'anyof' and 'allof' constraints are checked # correctly - schema = {'doc': {'type': 'dict', - 'anyof': [ - {'schema': [{'param': {'type': 'number'}}]}]}} + schema = { + 'doc': {'type': 'dict', 'anyof': [{'schema': [{'param': {'type': 'number'}}]}]} + } assert_schema_error({'doc': 'this is my document'}, schema) - schema = {'doc': {'type': 'dict', - 'allof': 
[ - {'schema': [{'param': {'type': 'number'}}]}]}} + schema = { + 'doc': {'type': 'dict', 'allof': [{'schema': [{'param': {'type': 'number'}}]}]} + } assert_schema_error({'doc': 'this is my document'}, schema) @@ -88,24 +88,87 @@ def test_validated_schema_cache(): v = Validator({'foozifix': {'coerce': int}}) assert len(v._valid_schemas) == cache_size - max_cache_size = 147 - assert cache_size <= max_cache_size, \ - "There's an unexpected high amount (%s) of cached valid " \ - "definition schemas. Unless you added further tests, " \ - "there are good chances that something is wrong. " \ - "If you added tests with new schemas, you can try to " \ - "adjust the variable `max_cache_size` according to " \ + max_cache_size = 160 + assert cache_size <= max_cache_size, ( + "There's an unexpected high amount (%s) of cached valid " + "definition schemas. Unless you added further tests, " + "there are good chances that something is wrong. " + "If you added tests with new schemas, you can try to " + "adjust the variable `max_cache_size` according to " "the added schemas." % cache_size + ) def test_expansion_in_nested_schema(): schema = {'detroit': {'schema': {'anyof_regex': ['^Aladdin', 'Sane$']}}} v = Validator(schema) - assert (v.schema['detroit']['schema'] == - {'anyof': [{'regex': '^Aladdin'}, {'regex': 'Sane$'}]}) + assert v.schema['detroit']['schema'] == { + 'anyof': [{'regex': '^Aladdin'}, {'regex': 'Sane$'}] + } def test_unvalidated_schema_can_be_copied(): schema = UnvalidatedSchema() schema_copy = schema.copy() assert schema_copy == schema + + +# TODO remove with next major release +def test_deprecated_rule_names_in_valueschema(): + def check_with(field, value, error): + pass + + schema = { + "field_1": { + "type": "dict", + "valueschema": { + "type": "dict", + "keyschema": {"type": "string"}, + "valueschema": {"type": "string"}, + }, + }, + "field_2": { + "type": "list", + "items": [ + {"keyschema": {}}, + {"validator": check_with}, + {"valueschema": {}}, + ], + }, + } + + validator = Validator(schema) + + assert validator.schema == { + "field_1": { + "type": "dict", + "valuesrules": { + "type": "dict", + "keysrules": {"type": "string"}, + "valuesrules": {"type": "string"}, + }, + }, + "field_2": { + "type": "list", + "items": [ + {"keysrules": {}}, + {"check_with": check_with}, + {"valuesrules": {}}, + ], + }, + } + + +def test_anyof_check_with(): + def foo(field, value, error): + pass + + def bar(field, value, error): + pass + + schema = {'field': {'anyof_check_with': [foo, bar]}} + validator = Validator(schema) + + assert validator.schema == { + 'field': {'anyof': [{'check_with': foo}, {'check_with': bar}]} + } diff --git a/pipenv/vendor/cerberus/tests/test_utils.py b/pipenv/vendor/cerberus/tests/test_utils.py new file mode 100644 index 0000000000..6ab387909f --- /dev/null +++ b/pipenv/vendor/cerberus/tests/test_utils.py @@ -0,0 +1,11 @@ +from cerberus.utils import compare_paths_lt + + +def test_compare_paths(): + lesser = ('a_dict', 'keysrules') + greater = ('a_dict', 'valuesrules') + assert compare_paths_lt(lesser, greater) + + lesser += ('type',) + greater += ('regex',) + assert compare_paths_lt(lesser, greater) diff --git a/pipenv/vendor/cerberus/tests/test_validation.py b/pipenv/vendor/cerberus/tests/test_validation.py index 1f828fac5b..ead7951733 100644 --- a/pipenv/vendor/cerberus/tests/test_validation.py +++ b/pipenv/vendor/cerberus/tests/test_validation.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- +import itertools import re import sys from datetime import datetime, date @@ -10,29 
+11,34 @@ from cerberus import errors, Validator from cerberus.tests import ( - assert_bad_type, assert_document_error, assert_fail, assert_has_error, - assert_not_has_error, assert_success + assert_bad_type, + assert_document_error, + assert_fail, + assert_has_error, + assert_not_has_error, + assert_success, ) from cerberus.tests.conftest import sample_schema def test_empty_document(): - assert_document_error(None, sample_schema, None, - errors.DOCUMENT_MISSING) + assert_document_error(None, sample_schema, None, errors.DOCUMENT_MISSING) def test_bad_document_type(): document = "not a dict" assert_document_error( - document, sample_schema, None, - errors.DOCUMENT_FORMAT.format(document) + document, sample_schema, None, errors.DOCUMENT_FORMAT.format(document) ) def test_unknown_field(validator): field = 'surname' - assert_fail({field: 'doe'}, validator=validator, - error=(field, (), errors.UNKNOWN_FIELD, None)) + assert_fail( + {field: 'doe'}, + validator=validator, + error=(field, (), errors.UNKNOWN_FIELD, None), + ) assert validator.errors == {field: ['unknown field']} @@ -45,14 +51,19 @@ def test_empty_field_definition(document): def test_required_field(schema): field = 'a_required_string' required_string_extension = { - 'a_required_string': {'type': 'string', - 'minlength': 2, - 'maxlength': 10, - 'required': True}} + 'a_required_string': { + 'type': 'string', + 'minlength': 2, + 'maxlength': 10, + 'required': True, + } + } schema.update(required_string_extension) - assert_fail({'an_integer': 1}, schema, - error=(field, (field, 'required'), errors.REQUIRED_FIELD, - True)) + assert_fail( + {'an_integer': 1}, + schema, + error=(field, (field, 'required'), errors.REQUIRED_FIELD, True), + ) def test_nullable_field(): @@ -64,22 +75,23 @@ def test_nullable_field(): assert_fail({'a_not_nullable_field_without_type': None}) +def test_nullable_skips_allowed(): + schema = {'role': {'allowed': ['agent', 'client', 'supplier'], 'nullable': True}} + assert_success({'role': None}, schema) + + def test_readonly_field(): field = 'a_readonly_string' - assert_fail({field: 'update me if you can'}, - error=(field, (field, 'readonly'), errors.READONLY_FIELD, True)) + assert_fail( + {field: 'update me if you can'}, + error=(field, (field, 'readonly'), errors.READONLY_FIELD, True), + ) def test_readonly_field_first_rule(): # test that readonly rule is checked before any other rule, and blocks. # See #63. 
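    # the value 2 also violates 'max': 1, but only the readonly failure
    # may be reported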
- schema = { - 'a_readonly_number': { - 'type': 'integer', - 'readonly': True, - 'max': 1 - } - } + schema = {'a_readonly_number': {'type': 'integer', 'readonly': True, 'max': 1}} v = Validator(schema) v.validate({'a_readonly_number': 2}) # it would be a list if there's more than one error; we get a dict @@ -89,28 +101,34 @@ def test_readonly_field_first_rule(): def test_readonly_field_with_default_value(): schema = { - 'created': { - 'type': 'string', - 'readonly': True, - 'default': 'today' - }, + 'created': {'type': 'string', 'readonly': True, 'default': 'today'}, 'modified': { 'type': 'string', 'readonly': True, - 'default_setter': lambda d: d['created'] - } + 'default_setter': lambda d: d['created'], + }, } assert_success({}, schema) - expected_errors = [('created', ('created', 'readonly'), - errors.READONLY_FIELD, - schema['created']['readonly']), - ('modified', ('modified', 'readonly'), - errors.READONLY_FIELD, - schema['modified']['readonly'])] - assert_fail({'created': 'tomorrow', 'modified': 'today'}, - schema, errors=expected_errors) - assert_fail({'created': 'today', 'modified': 'today'}, - schema, errors=expected_errors) + expected_errors = [ + ( + 'created', + ('created', 'readonly'), + errors.READONLY_FIELD, + schema['created']['readonly'], + ), + ( + 'modified', + ('modified', 'readonly'), + errors.READONLY_FIELD, + schema['modified']['readonly'], + ), + ] + assert_fail( + {'created': 'tomorrow', 'modified': 'today'}, schema, errors=expected_errors + ) + assert_fail( + {'created': 'today', 'modified': 'today'}, schema, errors=expected_errors + ) def test_nested_readonly_field_with_default_value(): @@ -118,33 +136,40 @@ def test_nested_readonly_field_with_default_value(): 'some_field': { 'type': 'dict', 'schema': { - 'created': { - 'type': 'string', - 'readonly': True, - 'default': 'today' - }, + 'created': {'type': 'string', 'readonly': True, 'default': 'today'}, 'modified': { 'type': 'string', 'readonly': True, - 'default_setter': lambda d: d['created'] - } - } + 'default_setter': lambda d: d['created'], + }, + }, } } assert_success({'some_field': {}}, schema) expected_errors = [ - (('some_field', 'created'), - ('some_field', 'schema', 'created', 'readonly'), - errors.READONLY_FIELD, - schema['some_field']['schema']['created']['readonly']), - (('some_field', 'modified'), - ('some_field', 'schema', 'modified', 'readonly'), - errors.READONLY_FIELD, - schema['some_field']['schema']['modified']['readonly'])] - assert_fail({'some_field': {'created': 'tomorrow', 'modified': 'now'}}, - schema, errors=expected_errors) - assert_fail({'some_field': {'created': 'today', 'modified': 'today'}}, - schema, errors=expected_errors) + ( + ('some_field', 'created'), + ('some_field', 'schema', 'created', 'readonly'), + errors.READONLY_FIELD, + schema['some_field']['schema']['created']['readonly'], + ), + ( + ('some_field', 'modified'), + ('some_field', 'schema', 'modified', 'readonly'), + errors.READONLY_FIELD, + schema['some_field']['schema']['modified']['readonly'], + ), + ] + assert_fail( + {'some_field': {'created': 'tomorrow', 'modified': 'now'}}, + schema, + errors=expected_errors, + ) + assert_fail( + {'some_field': {'created': 'today', 'modified': 'today'}}, + schema, + errors=expected_errors, + ) def test_repeated_readonly(validator): @@ -195,44 +220,73 @@ def test_bad_max_length(schema): field = 'a_string' max_length = schema[field]['maxlength'] value = "".join(choice(ascii_lowercase) for i in range(max_length + 1)) - assert_fail({field: value}, - error=(field, (field, 'maxlength'), 
errors.MAX_LENGTH, - max_length, (len(value),))) + assert_fail( + {field: value}, + error=( + field, + (field, 'maxlength'), + errors.MAX_LENGTH, + max_length, + (len(value),), + ), + ) def test_bad_max_length_binary(schema): field = 'a_binary' max_length = schema[field]['maxlength'] value = b'\x00' * (max_length + 1) - assert_fail({field: value}, - error=(field, (field, 'maxlength'), errors.MAX_LENGTH, - max_length, (len(value),))) + assert_fail( + {field: value}, + error=( + field, + (field, 'maxlength'), + errors.MAX_LENGTH, + max_length, + (len(value),), + ), + ) def test_bad_min_length(schema): field = 'a_string' min_length = schema[field]['minlength'] value = "".join(choice(ascii_lowercase) for i in range(min_length - 1)) - assert_fail({field: value}, - error=(field, (field, 'minlength'), errors.MIN_LENGTH, - min_length, (len(value),))) + assert_fail( + {field: value}, + error=( + field, + (field, 'minlength'), + errors.MIN_LENGTH, + min_length, + (len(value),), + ), + ) def test_bad_min_length_binary(schema): field = 'a_binary' min_length = schema[field]['minlength'] value = b'\x00' * (min_length - 1) - assert_fail({field: value}, - error=(field, (field, 'minlength'), errors.MIN_LENGTH, - min_length, (len(value),))) + assert_fail( + {field: value}, + error=( + field, + (field, 'minlength'), + errors.MIN_LENGTH, + min_length, + (len(value),), + ), + ) def test_bad_max_value(schema): def assert_bad_max_value(field, inc): max_value = schema[field]['max'] value = max_value + inc - assert_fail({field: value}, - error=(field, (field, 'max'), errors.MAX_VALUE, max_value)) + assert_fail( + {field: value}, error=(field, (field, 'max'), errors.MAX_VALUE, max_value) + ) field = 'an_integer' assert_bad_max_value(field, 1) @@ -246,9 +300,9 @@ def test_bad_min_value(schema): def assert_bad_min_value(field, inc): min_value = schema[field]['min'] value = min_value - inc - assert_fail({field: value}, - error=(field, (field, 'min'), - errors.MIN_VALUE, min_value)) + assert_fail( + {field: value}, error=(field, (field, 'min'), errors.MIN_VALUE, min_value) + ) field = 'an_integer' assert_bad_min_value(field, 1) @@ -261,65 +315,112 @@ def assert_bad_min_value(field, inc): def test_bad_schema(): field = 'a_dict' subschema_field = 'address' - schema = {field: {'type': 'dict', - 'schema': {subschema_field: {'type': 'string'}, - 'city': {'type': 'string', 'required': True}} - }} + schema = { + field: { + 'type': 'dict', + 'schema': { + subschema_field: {'type': 'string'}, + 'city': {'type': 'string', 'required': True}, + }, + } + } document = {field: {subschema_field: 34}} validator = Validator(schema) assert_fail( - document, validator=validator, - error=(field, (field, 'schema'), errors.MAPPING_SCHEMA, - validator.schema['a_dict']['schema']), + document, + validator=validator, + error=( + field, + (field, 'schema'), + errors.MAPPING_SCHEMA, + validator.schema['a_dict']['schema'], + ), child_errors=[ - ((field, subschema_field), - (field, 'schema', subschema_field, 'type'), - errors.BAD_TYPE, 'string'), - ((field, 'city'), (field, 'schema', 'city', 'required'), - errors.REQUIRED_FIELD, True)] + ( + (field, subschema_field), + (field, 'schema', subschema_field, 'type'), + errors.BAD_TYPE, + 'string', + ), + ( + (field, 'city'), + (field, 'schema', 'city', 'required'), + errors.REQUIRED_FIELD, + True, + ), + ], ) handler = errors.BasicErrorHandler assert field in validator.errors assert subschema_field in validator.errors[field][-1] - assert handler.messages[errors.BAD_TYPE.code].format(constraint='string') 
\ + assert ( + handler.messages[errors.BAD_TYPE.code].format(constraint='string') in validator.errors[field][-1][subschema_field] + ) assert 'city' in validator.errors[field][-1] - assert (handler.messages[errors.REQUIRED_FIELD.code] - in validator.errors[field][-1]['city']) + assert ( + handler.messages[errors.REQUIRED_FIELD.code] + in validator.errors[field][-1]['city'] + ) -def test_bad_valueschema(): - field = 'a_dict_with_valueschema' +def test_bad_valuesrules(): + field = 'a_dict_with_valuesrules' schema_field = 'a_string' value = {schema_field: 'not an integer'} exp_child_errors = [ - ((field, schema_field), (field, 'valueschema', 'type'), errors.BAD_TYPE, - 'integer')] - assert_fail({field: value}, - error=(field, (field, 'valueschema'), errors.VALUESCHEMA, - {'type': 'integer'}), child_errors=exp_child_errors) + ( + (field, schema_field), + (field, 'valuesrules', 'type'), + errors.BAD_TYPE, + 'integer', + ) + ] + assert_fail( + {field: value}, + error=(field, (field, 'valuesrules'), errors.VALUESRULES, {'type': 'integer'}), + child_errors=exp_child_errors, + ) def test_bad_list_of_values(validator): field = 'a_list_of_values' value = ['a string', 'not an integer'] - assert_fail({field: value}, validator=validator, - error=(field, (field, 'items'), errors.BAD_ITEMS, - [{'type': 'string'}, {'type': 'integer'}]), - child_errors=[((field, 1), (field, 'items', 1, 'type'), - errors.BAD_TYPE, 'integer')]) + assert_fail( + {field: value}, + validator=validator, + error=( + field, + (field, 'items'), + errors.BAD_ITEMS, + [{'type': 'string'}, {'type': 'integer'}], + ), + child_errors=[ + ((field, 1), (field, 'items', 1, 'type'), errors.BAD_TYPE, 'integer') + ], + ) - assert (errors.BasicErrorHandler.messages[errors.BAD_TYPE.code]. - format(constraint='integer') - in validator.errors[field][-1][1]) + assert ( + errors.BasicErrorHandler.messages[errors.BAD_TYPE.code].format( + constraint='integer' + ) + in validator.errors[field][-1][1] + ) value = ['a string', 10, 'an extra item'] - assert_fail({field: value}, - error=(field, (field, 'items'), errors.ITEMS_LENGTH, - [{'type': 'string'}, {'type': 'integer'}], (2, 3))) + assert_fail( + {field: value}, + error=( + field, + (field, 'items'), + errors.ITEMS_LENGTH, + [{'type': 'string'}, {'type': 'integer'}], + (2, 3), + ), + ) def test_bad_list_of_integers(): @@ -330,58 +431,81 @@ def test_bad_list_of_integers(): def test_bad_list_of_dicts(): field = 'a_list_of_dicts' - map_schema = {'sku': {'type': 'string'}, - 'price': {'type': 'integer', 'required': True}} + map_schema = { + 'sku': {'type': 'string'}, + 'price': {'type': 'integer', 'required': True}, + } seq_schema = {'type': 'dict', 'schema': map_schema} schema = {field: {'type': 'list', 'schema': seq_schema}} validator = Validator(schema) value = [{'sku': 'KT123', 'price': '100'}] document = {field: value} - assert_fail(document, validator=validator, - error=(field, (field, 'schema'), errors.SEQUENCE_SCHEMA, - seq_schema), - child_errors=[((field, 0), (field, 'schema', 'schema'), - errors.MAPPING_SCHEMA, map_schema)]) + assert_fail( + document, + validator=validator, + error=(field, (field, 'schema'), errors.SEQUENCE_SCHEMA, seq_schema), + child_errors=[ + ((field, 0), (field, 'schema', 'schema'), errors.MAPPING_SCHEMA, map_schema) + ], + ) assert field in validator.errors assert 0 in validator.errors[field][-1] assert 'price' in validator.errors[field][-1][0][-1] - exp_msg = errors.BasicErrorHandler.messages[errors.BAD_TYPE.code] \ - .format(constraint='integer') + exp_msg = 
errors.BasicErrorHandler.messages[errors.BAD_TYPE.code].format( + constraint='integer' + ) assert exp_msg in validator.errors[field][-1][0][-1]['price'] value = ["not a dict"] - exp_child_errors = [((field, 0), (field, 'schema', 'type'), - errors.BAD_TYPE, 'dict', ())] - assert_fail({field: value}, - error=(field, (field, 'schema'), errors.SEQUENCE_SCHEMA, - seq_schema), - child_errors=exp_child_errors) + exp_child_errors = [ + ((field, 0), (field, 'schema', 'type'), errors.BAD_TYPE, 'dict', ()) + ] + assert_fail( + {field: value}, + error=(field, (field, 'schema'), errors.SEQUENCE_SCHEMA, seq_schema), + child_errors=exp_child_errors, + ) def test_array_unallowed(): field = 'an_array' value = ['agent', 'client', 'profit'] - assert_fail({field: value}, - error=(field, (field, 'allowed'), errors.UNALLOWED_VALUES, - ['agent', 'client', 'vendor'], ['profit'])) + assert_fail( + {field: value}, + error=( + field, + (field, 'allowed'), + errors.UNALLOWED_VALUES, + ['agent', 'client', 'vendor'], + ['profit'], + ), + ) def test_string_unallowed(): field = 'a_restricted_string' value = 'profit' - assert_fail({field: value}, - error=(field, (field, 'allowed'), errors.UNALLOWED_VALUE, - ['agent', 'client', 'vendor'], value)) + assert_fail( + {field: value}, + error=( + field, + (field, 'allowed'), + errors.UNALLOWED_VALUE, + ['agent', 'client', 'vendor'], + value, + ), + ) def test_integer_unallowed(): field = 'a_restricted_integer' value = 2 - assert_fail({field: value}, - error=(field, (field, 'allowed'), errors.UNALLOWED_VALUE, - [-1, 0, 1], value)) + assert_fail( + {field: value}, + error=(field, (field, 'allowed'), errors.UNALLOWED_VALUE, [-1, 0, 1], value), + ) def test_integer_allowed(): @@ -389,10 +513,14 @@ def test_integer_allowed(): def test_validate_update(): - assert_success({'an_integer': 100, - 'a_dict': {'address': 'adr'}, - 'a_list_of_dicts': [{'sku': 'let'}] - }, update=True) + assert_success( + { + 'an_integer': 100, + 'a_dict': {'address': 'adr'}, + 'a_list_of_dicts': [{'sku': 'let'}], + }, + update=True, + ) def test_string(): @@ -437,24 +565,35 @@ def test_one_of_two_types(validator): field = 'one_or_more_strings' assert_success({field: 'foo'}) assert_success({field: ['foo', 'bar']}) - exp_child_errors = [((field, 1), (field, 'schema', 'type'), - errors.BAD_TYPE, 'string')] - assert_fail({field: ['foo', 23]}, validator=validator, - error=(field, (field, 'schema'), errors.SEQUENCE_SCHEMA, - {'type': 'string'}), - child_errors=exp_child_errors) - assert_fail({field: 23}, - error=((field,), (field, 'type'), errors.BAD_TYPE, - ['string', 'list'])) + exp_child_errors = [ + ((field, 1), (field, 'schema', 'type'), errors.BAD_TYPE, 'string') + ] + assert_fail( + {field: ['foo', 23]}, + validator=validator, + error=(field, (field, 'schema'), errors.SEQUENCE_SCHEMA, {'type': 'string'}), + child_errors=exp_child_errors, + ) + assert_fail( + {field: 23}, + error=((field,), (field, 'type'), errors.BAD_TYPE, ['string', 'list']), + ) assert validator.errors == {field: [{1: ['must be of string type']}]} def test_regex(validator): field = 'a_regex_email' assert_success({field: 'valid.email@gmail.com'}, validator=validator) - assert_fail({field: 'invalid'}, update=True, - error=(field, (field, 'regex'), errors.REGEX_MISMATCH, - '^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$')) + assert_fail( + {field: 'invalid'}, + update=True, + error=( + field, + (field, 'regex'), + errors.REGEX_MISMATCH, + r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$', + ), + ) def test_a_list_of_dicts(): @@ -462,7 
+601,7 @@ def test_a_list_of_dicts(): { 'a_list_of_dicts': [ {'sku': 'AK345', 'price': 100}, - {'sku': 'YZ069', 'price': 25} + {'sku': 'YZ069', 'price': 25}, ] } ) @@ -472,50 +611,84 @@ def test_a_list_of_values(): assert_success({'a_list_of_values': ['hello', 100]}) +def test_an_array_from_set(): + assert_success({'an_array_from_set': ['agent', 'client']}) + + def test_a_list_of_integers(): assert_success({'a_list_of_integers': [99, 100]}) def test_a_dict(schema): - assert_success({'a_dict': {'address': 'i live here', - 'city': 'in my own town'}}) + assert_success({'a_dict': {'address': 'i live here', 'city': 'in my own town'}}) assert_fail( {'a_dict': {'address': 8545}}, - error=('a_dict', ('a_dict', 'schema'), errors.MAPPING_SCHEMA, - schema['a_dict']['schema']), - child_errors=[(('a_dict', 'address'), - ('a_dict', 'schema', 'address', 'type'), - errors.BAD_TYPE, 'string'), - (('a_dict', 'city'), - ('a_dict', 'schema', 'city', 'required'), - errors.REQUIRED_FIELD, True)] + error=( + 'a_dict', + ('a_dict', 'schema'), + errors.MAPPING_SCHEMA, + schema['a_dict']['schema'], + ), + child_errors=[ + ( + ('a_dict', 'address'), + ('a_dict', 'schema', 'address', 'type'), + errors.BAD_TYPE, + 'string', + ), + ( + ('a_dict', 'city'), + ('a_dict', 'schema', 'city', 'required'), + errors.REQUIRED_FIELD, + True, + ), + ], ) -def test_a_dict_with_valueschema(validator): - assert_success({'a_dict_with_valueschema': - {'an integer': 99, 'another integer': 100}}) +def test_a_dict_with_valuesrules(validator): + assert_success( + {'a_dict_with_valuesrules': {'an integer': 99, 'another integer': 100}} + ) error = ( - 'a_dict_with_valueschema', ('a_dict_with_valueschema', 'valueschema'), - errors.VALUESCHEMA, {'type': 'integer'}) + 'a_dict_with_valuesrules', + ('a_dict_with_valuesrules', 'valuesrules'), + errors.VALUESRULES, + {'type': 'integer'}, + ) child_errors = [ - (('a_dict_with_valueschema', 'a string'), - ('a_dict_with_valueschema', 'valueschema', 'type'), - errors.BAD_TYPE, 'integer')] + ( + ('a_dict_with_valuesrules', 'a string'), + ('a_dict_with_valuesrules', 'valuesrules', 'type'), + errors.BAD_TYPE, + 'integer', + ) + ] - assert_fail({'a_dict_with_valueschema': {'a string': '99'}}, - validator=validator, error=error, child_errors=child_errors) + assert_fail( + {'a_dict_with_valuesrules': {'a string': '99'}}, + validator=validator, + error=error, + child_errors=child_errors, + ) - assert 'valueschema' in \ - validator.schema_error_tree['a_dict_with_valueschema'] + assert 'valuesrules' in validator.schema_error_tree['a_dict_with_valuesrules'] v = validator.schema_error_tree - assert len(v['a_dict_with_valueschema']['valueschema'].descendants) == 1 + assert len(v['a_dict_with_valuesrules']['valuesrules'].descendants) == 1 -def test_a_dict_with_keyschema(): - assert_success({'a_dict_with_keyschema': {'key': 'value'}}) - assert_fail({'a_dict_with_keyschema': {'KEY': 'value'}}) +# TODO remove 'keyschema' as rule with the next major release +@mark.parametrize('rule', ('keysrules', 'keyschema')) +def test_keysrules(rule): + schema = { + 'a_dict_with_keysrules': { + 'type': 'dict', + rule: {'type': 'string', 'regex': '[a-z]+'}, + } + } + assert_success({'a_dict_with_keysrules': {'key': 'value'}}, schema=schema) + assert_fail({'a_dict_with_keysrules': {'KEY': 'value'}}, schema=schema) def test_a_list_length(schema): @@ -523,17 +696,31 @@ def test_a_list_length(schema): min_length = schema[field]['minlength'] max_length = schema[field]['maxlength'] - assert_fail({field: [1] * (min_length - 1)}, - 
error=(field, (field, 'minlength'), errors.MIN_LENGTH, - min_length, (min_length - 1,))) + assert_fail( + {field: [1] * (min_length - 1)}, + error=( + field, + (field, 'minlength'), + errors.MIN_LENGTH, + min_length, + (min_length - 1,), + ), + ) for i in range(min_length, max_length): value = [1] * i assert_success({field: value}) - assert_fail({field: [1] * (max_length + 1)}, - error=(field, (field, 'maxlength'), errors.MAX_LENGTH, - max_length, (max_length + 1,))) + assert_fail( + {field: [1] * (max_length + 1)}, + error=( + field, + (field, 'maxlength'), + errors.MAX_LENGTH, + max_length, + (max_length + 1,), + ), + ) def test_custom_datatype(): @@ -544,11 +731,12 @@ def _validate_type_objectid(self, value): schema = {'test_field': {'type': 'objectid'}} validator = MyValidator(schema) - assert_success({'test_field': '50ad188438345b1049c88a28'}, - validator=validator) - assert_fail({'test_field': 'hello'}, validator=validator, - error=('test_field', ('test_field', 'type'), errors.BAD_TYPE, - 'objectid')) + assert_success({'test_field': '50ad188438345b1049c88a28'}, validator=validator) + assert_fail( + {'test_field': 'hello'}, + validator=validator, + error=('test_field', ('test_field', 'type'), errors.BAD_TYPE, 'objectid'), + ) def test_custom_datatype_rule(): @@ -565,12 +753,16 @@ def _validate_type_number(self, value): schema = {'test_field': {'min_number': 1, 'type': 'number'}} validator = MyValidator(schema) - assert_fail({'test_field': '0'}, validator=validator, - error=('test_field', ('test_field', 'type'), errors.BAD_TYPE, - 'number')) - assert_fail({'test_field': 0}, validator=validator, - error=('test_field', (), errors.CUSTOM, None, - ('Below the min',))) + assert_fail( + {'test_field': '0'}, + validator=validator, + error=('test_field', ('test_field', 'type'), errors.BAD_TYPE, 'number'), + ) + assert_fail( + {'test_field': 0}, + validator=validator, + error=('test_field', (), errors.CUSTOM, None, ('Below the min',)), + ) assert validator.errors == {'test_field': ['Below the min']} @@ -584,14 +776,17 @@ def _validate_isodd(self, isodd, field, value): schema = {'test_field': {'isodd': True}} validator = MyValidator(schema) assert_success({'test_field': 7}, validator=validator) - assert_fail({'test_field': 6}, validator=validator, - error=('test_field', (), errors.CUSTOM, None, - ('Not an odd number',))) + assert_fail( + {'test_field': 6}, + validator=validator, + error=('test_field', (), errors.CUSTOM, None, ('Not an odd number',)), + ) assert validator.errors == {'test_field': ['Not an odd number']} -@mark.parametrize('value, _type', - (('', 'string'), ((), 'list'), ({}, 'dict'), ([], 'list'))) +@mark.parametrize( + 'value, _type', (('', 'string'), ((), 'list'), ({}, 'dict'), ([], 'list')) +) def test_empty_values(value, _type): field = 'test' schema = {field: {'type': _type}} @@ -600,17 +795,18 @@ def test_empty_values(value, _type): assert_success(document, schema) schema[field]['empty'] = False - assert_fail(document, schema, - error=(field, (field, 'empty'), - errors.EMPTY_NOT_ALLOWED, False)) + assert_fail( + document, + schema, + error=(field, (field, 'empty'), errors.EMPTY_NOT_ALLOWED, False), + ) schema[field]['empty'] = True assert_success(document, schema) def test_empty_skips_regex(validator): - schema = {'foo': {'empty': True, 'regex': r'\d?\d\.\d\d', - 'type': 'string'}} + schema = {'foo': {'empty': True, 'regex': r'\d?\d\.\d\d', 'type': 'string'}} assert validator({'foo': ''}, schema) @@ -625,8 +821,9 @@ def test_ignore_none_values(): 
validator.schema[field]['required'] = True validator.schema.validate() _errors = assert_fail(document, validator=validator) - assert_not_has_error(_errors, field, (field, 'required'), - errors.REQUIRED_FIELD, True) + assert_not_has_error( + _errors, field, (field, 'required'), errors.REQUIRED_FIELD, True + ) # Test ignore None behaviour validator = Validator(schema, ignore_none_values=True) @@ -635,10 +832,8 @@ def test_ignore_none_values(): assert_success(document, validator=validator) validator.schema[field]['required'] = True _errors = assert_fail(schema=schema, document=document, validator=validator) - assert_has_error(_errors, field, (field, 'required'), errors.REQUIRED_FIELD, - True) - assert_not_has_error(_errors, field, (field, 'type'), errors.BAD_TYPE, - 'string') + assert_has_error(_errors, field, (field, 'required'), errors.REQUIRED_FIELD, True) + assert_not_has_error(_errors, field, (field, 'type'), errors.BAD_TYPE, 'string') def test_unknown_keys(): @@ -677,8 +872,7 @@ def test_unknown_keys_list_of_dicts(validator): # test that allow_unknown is honored even for subdicts in lists. # https://github.com/pyeve/cerberus/issues/67. validator.allow_unknown = True - document = {'a_list_of_dicts': [{'sku': 'YZ069', 'price': 25, - 'extra': True}]} + document = {'a_list_of_dicts': [{'sku': 'YZ069', 'price': 25, 'extra': True}]} assert_success(document, validator=validator) @@ -692,8 +886,7 @@ def _validate_type_foo(self, value): validator = CustomValidator({}) validator.allow_unknown = {"type": "foo"} - assert_success(document={"fred": "foo", "barney": "foo"}, - validator=validator) + assert_success(document={"fred": "foo", "barney": "foo"}, validator=validator) def test_nested_unknown_keys(): @@ -701,16 +894,10 @@ def test_nested_unknown_keys(): 'field1': { 'type': 'dict', 'allow_unknown': True, - 'schema': {'nested1': {'type': 'string'}} - } - } - document = { - 'field1': { - 'nested1': 'foo', - 'arb1': 'bar', - 'arb2': 42 + 'schema': {'nested1': {'type': 'string'}}, } } + document = {'field1': {'nested1': 'foo', 'arb1': 'bar', 'arb2': 42}} assert_success(document=document, schema=schema) schema['field1']['allow_unknown'] = {'type': 'string'} @@ -739,8 +926,7 @@ def test_callable_validator(): def test_dependencies_field(): - schema = {'test_field': {'dependencies': 'foo'}, - 'foo': {'type': 'string'}} + schema = {'test_field': {'dependencies': 'foo'}, 'foo': {'type': 'string'}} assert_success({'test_field': 'foobar', 'foo': 'bar'}, schema) assert_fail({'test_field': 'foobar'}, schema) @@ -749,10 +935,9 @@ def test_dependencies_list(): schema = { 'test_field': {'dependencies': ['foo', 'bar']}, 'foo': {'type': 'string'}, - 'bar': {'type': 'string'} + 'bar': {'type': 'string'}, } - assert_success({'test_field': 'foobar', 'foo': 'bar', 'bar': 'foo'}, - schema) + assert_success({'test_field': 'foobar', 'foo': 'bar', 'bar': 'foo'}, schema) assert_fail({'test_field': 'foobar', 'foo': 'bar'}, schema) @@ -760,7 +945,7 @@ def test_dependencies_list_with_required_field(): schema = { 'test_field': {'required': True, 'dependencies': ['foo', 'bar']}, 'foo': {'type': 'string'}, - 'bar': {'type': 'string'} + 'bar': {'type': 'string'}, } # False: all dependencies missing assert_fail({'test_field': 'foobar'}, schema) @@ -784,27 +969,23 @@ def test_dependencies_list_with_subodcuments_fields(): 'test_field': {'dependencies': ['a_dict.foo', 'a_dict.bar']}, 'a_dict': { 'type': 'dict', - 'schema': { - 'foo': {'type': 'string'}, - 'bar': {'type': 'string'} - } - } + 'schema': {'foo': {'type': 'string'}, 
'bar': {'type': 'string'}}, + }, } - assert_success({'test_field': 'foobar', - 'a_dict': {'foo': 'foo', 'bar': 'bar'}}, schema) + assert_success( + {'test_field': 'foobar', 'a_dict': {'foo': 'foo', 'bar': 'bar'}}, schema + ) assert_fail({'test_field': 'foobar', 'a_dict': {}}, schema) - assert_fail({'test_field': 'foobar', - 'a_dict': {'foo': 'foo'}}, schema) + assert_fail({'test_field': 'foobar', 'a_dict': {'foo': 'foo'}}, schema) def test_dependencies_dict(): schema = { 'test_field': {'dependencies': {'foo': 'foo', 'bar': 'bar'}}, 'foo': {'type': 'string'}, - 'bar': {'type': 'string'} + 'bar': {'type': 'string'}, } - assert_success({'test_field': 'foobar', 'foo': 'foo', 'bar': 'bar'}, - schema) + assert_success({'test_field': 'foobar', 'foo': 'foo', 'bar': 'bar'}, schema) assert_fail({'test_field': 'foobar', 'foo': 'foo'}, schema) assert_fail({'test_field': 'foobar', 'foo': 'bar'}, schema) assert_fail({'test_field': 'foobar', 'bar': 'bar'}, schema) @@ -814,12 +995,9 @@ def test_dependencies_dict(): def test_dependencies_dict_with_required_field(): schema = { - 'test_field': { - 'required': True, - 'dependencies': {'foo': 'foo', 'bar': 'bar'} - }, + 'test_field': {'required': True, 'dependencies': {'foo': 'foo', 'bar': 'bar'}}, 'foo': {'type': 'string'}, - 'bar': {'type': 'string'} + 'bar': {'type': 'string'}, } # False: all dependencies missing assert_fail({'test_field': 'foobar'}, schema) @@ -833,8 +1011,7 @@ def test_dependencies_dict_with_required_field(): # False: dependency missing assert_fail({'foo': 'bar'}, schema) - assert_success({'test_field': 'foobar', 'foo': 'foo', 'bar': 'bar'}, - schema) + assert_success({'test_field': 'foobar', 'foo': 'foo', 'bar': 'bar'}, schema) # True: dependencies are validated but field is not required schema['test_field']['required'] = False @@ -843,10 +1020,7 @@ def test_dependencies_dict_with_required_field(): def test_dependencies_field_satisfy_nullable_field(): # https://github.com/pyeve/cerberus/issues/305 - schema = { - 'foo': {'nullable': True}, - 'bar': {'dependencies': 'foo'} - } + schema = {'foo': {'nullable': True}, 'bar': {'dependencies': 'foo'}} assert_success({'foo': None, 'bar': 1}, schema) assert_success({'foo': None}, schema) @@ -857,7 +1031,7 @@ def test_dependencies_field_with_mutually_dependent_nullable_fields(): # https://github.com/pyeve/cerberus/pull/306 schema = { 'foo': {'dependencies': 'bar', 'nullable': True}, - 'bar': {'dependencies': 'foo', 'nullable': True} + 'bar': {'dependencies': 'foo', 'nullable': True}, } assert_success({'foo': None, 'bar': None}, schema) assert_success({'foo': 1, 'bar': 1}, schema) @@ -868,63 +1042,75 @@ def test_dependencies_field_with_mutually_dependent_nullable_fields(): def test_dependencies_dict_with_subdocuments_fields(): schema = { - 'test_field': {'dependencies': {'a_dict.foo': ['foo', 'bar'], - 'a_dict.bar': 'bar'}}, + 'test_field': { + 'dependencies': {'a_dict.foo': ['foo', 'bar'], 'a_dict.bar': 'bar'} + }, 'a_dict': { 'type': 'dict', - 'schema': { - 'foo': {'type': 'string'}, - 'bar': {'type': 'string'} - } - } + 'schema': {'foo': {'type': 'string'}, 'bar': {'type': 'string'}}, + }, } - assert_success({'test_field': 'foobar', - 'a_dict': {'foo': 'foo', 'bar': 'bar'}}, schema) - assert_success({'test_field': 'foobar', - 'a_dict': {'foo': 'bar', 'bar': 'bar'}}, schema) + assert_success( + {'test_field': 'foobar', 'a_dict': {'foo': 'foo', 'bar': 'bar'}}, schema + ) + assert_success( + {'test_field': 'foobar', 'a_dict': {'foo': 'bar', 'bar': 'bar'}}, schema + ) assert_fail({'test_field': 
'foobar', 'a_dict': {}}, schema) - assert_fail({'test_field': 'foobar', - 'a_dict': {'foo': 'foo', 'bar': 'foo'}}, schema) - assert_fail({'test_field': 'foobar', 'a_dict': {'bar': 'foo'}}, - schema) - assert_fail({'test_field': 'foobar', 'a_dict': {'bar': 'bar'}}, - schema) + assert_fail( + {'test_field': 'foobar', 'a_dict': {'foo': 'foo', 'bar': 'foo'}}, schema + ) + assert_fail({'test_field': 'foobar', 'a_dict': {'bar': 'foo'}}, schema) + assert_fail({'test_field': 'foobar', 'a_dict': {'bar': 'bar'}}, schema) def test_root_relative_dependencies(): # https://github.com/pyeve/cerberus/issues/288 subschema = {'version': {'dependencies': '^repo'}} - schema = {'package': {'allow_unknown': True, 'schema': subschema}, - 'repo': {}} + schema = {'package': {'allow_unknown': True, 'schema': subschema}, 'repo': {}} assert_fail( - {'package': {'repo': 'somewhere', 'version': 0}}, schema, - error=('package', ('package', 'schema'), - errors.MAPPING_SCHEMA, subschema), - child_errors=[( - ('package', 'version'), - ('package', 'schema', 'version', 'dependencies'), - errors.DEPENDENCIES_FIELD, '^repo', ('^repo',) - )] + {'package': {'repo': 'somewhere', 'version': 0}}, + schema, + error=('package', ('package', 'schema'), errors.MAPPING_SCHEMA, subschema), + child_errors=[ + ( + ('package', 'version'), + ('package', 'schema', 'version', 'dependencies'), + errors.DEPENDENCIES_FIELD, + '^repo', + ('^repo',), + ) + ], ) assert_success({'repo': 'somewhere', 'package': {'version': 1}}, schema) def test_dependencies_errors(): - v = Validator({'field1': {'required': False}, - 'field2': {'required': True, - 'dependencies': {'field1': ['one', 'two']}}}) - assert_fail({'field1': 'three', 'field2': 7}, validator=v, - error=('field2', ('field2', 'dependencies'), - errors.DEPENDENCIES_FIELD_VALUE, - {'field1': ['one', 'two']}, ({'field1': 'three'},))) + v = Validator( + { + 'field1': {'required': False}, + 'field2': {'required': True, 'dependencies': {'field1': ['one', 'two']}}, + } + ) + assert_fail( + {'field1': 'three', 'field2': 7}, + validator=v, + error=( + 'field2', + ('field2', 'dependencies'), + errors.DEPENDENCIES_FIELD_VALUE, + {'field1': ['one', 'two']}, + ({'field1': 'three'},), + ), + ) def test_options_passed_to_nested_validators(validator): - validator.schema = {'sub_dict': {'type': 'dict', - 'schema': {'foo': {'type': 'string'}}}} + validator.schema = { + 'sub_dict': {'type': 'dict', 'schema': {'foo': {'type': 'string'}}} + } validator.allow_unknown = True - assert_success({'sub_dict': {'foo': 'bar', 'unknown': True}}, - validator=validator) + assert_success({'sub_dict': {'foo': 'bar', 'unknown': True}}, validator=validator) def test_self_root_document(): @@ -937,8 +1123,7 @@ def test_self_root_document(): class MyValidator(Validator): def _validate_root_doc(self, root_doc, field, value): """ {'type': 'boolean'} """ - if ('sub' not in self.root_document or - len(self.root_document['sub']) != 2): + if 'sub' not in self.root_document or len(self.root_document['sub']) != 2: self._error(field, 'self.context is not the root doc!') schema = { @@ -947,17 +1132,13 @@ def _validate_root_doc(self, root_doc, field, value): 'root_doc': True, 'schema': { 'type': 'dict', - 'schema': { - 'foo': { - 'type': 'string', - 'root_doc': True - } - } - } + 'schema': {'foo': {'type': 'string', 'root_doc': True}}, + }, } } - assert_success({'sub': [{'foo': 'bar'}, {'foo': 'baz'}]}, - validator=MyValidator(schema)) + assert_success( + {'sub': [{'foo': 'bar'}, {'foo': 'baz'}]}, validator=MyValidator(schema) + ) def 
test_validator_rule(validator): @@ -967,11 +1148,14 @@ def validate_name(field, value, error): validator.schema = { 'name': {'validator': validate_name}, - 'age': {'type': 'integer'} + 'age': {'type': 'integer'}, } - assert_fail({'name': 'ItsMe', 'age': 2}, validator=validator, - error=('name', (), errors.CUSTOM, None, ('must be lowercase',))) + assert_fail( + {'name': 'ItsMe', 'age': 2}, + validator=validator, + error=('name', (), errors.CUSTOM, None, ('must be lowercase',)), + ) assert validator.errors == {'name': ['must be lowercase']} assert_success({'name': 'itsme', 'age': 2}, validator=validator) @@ -992,23 +1176,20 @@ def test_anyof(): assert_success(doc, schema) # prop1 must be either a number between 0 and 10 or 100 and 110 - schema = {'prop1': {'anyof': - [{'min': 0, 'max': 10}, {'min': 100, 'max': 110}]}} + schema = {'prop1': {'anyof': [{'min': 0, 'max': 10}, {'min': 100, 'max': 110}]}} doc = {'prop1': 105} assert_success(doc, schema) # prop1 must be either a number between 0 and 10 or 100 and 110 - schema = {'prop1': {'anyof': - [{'min': 0, 'max': 10}, {'min': 100, 'max': 110}]}} + schema = {'prop1': {'anyof': [{'min': 0, 'max': 10}, {'min': 100, 'max': 110}]}} doc = {'prop1': 50} assert_fail(doc, schema) # prop1 must be an integer that is either be # greater than or equal to 0, or greater than or equal to 10 - schema = {'prop1': {'type': 'integer', - 'anyof': [{'min': 0}, {'min': 10}]}} + schema = {'prop1': {'type': 'integer', 'anyof': [{'min': 0}, {'min': 10}]}} assert_success({'prop1': 10}, schema) # test that intermediate schemas do not sustain assert 'type' not in schema['prop1']['anyof'][0] @@ -1019,12 +1200,14 @@ def test_anyof(): exp_child_errors = [ (('prop1',), ('prop1', 'anyof', 0, 'min'), errors.MIN_VALUE, 0), - (('prop1',), ('prop1', 'anyof', 1, 'min'), errors.MIN_VALUE, 10) + (('prop1',), ('prop1', 'anyof', 1, 'min'), errors.MIN_VALUE, 10), ] - assert_fail({'prop1': -1}, schema, - error=(('prop1',), ('prop1', 'anyof'), errors.ANYOF, - [{'min': 0}, {'min': 10}]), - child_errors=exp_child_errors) + assert_fail( + {'prop1': -1}, + schema, + error=(('prop1',), ('prop1', 'anyof'), errors.ANYOF, [{'min': 0}, {'min': 10}]), + child_errors=exp_child_errors, + ) doc = {'prop1': 5.5} assert_fail(doc, schema) doc = {'prop1': '5.5'} @@ -1033,8 +1216,7 @@ def test_anyof(): def test_allof(): # prop1 has to be a float between 0 and 10 - schema = {'prop1': {'allof': [ - {'type': 'float'}, {'min': 0}, {'max': 10}]}} + schema = {'prop1': {'allof': [{'type': 'float'}, {'min': 0}, {'max': 10}]}} doc = {'prop1': -1} assert_fail(doc, schema) doc = {'prop1': 5} @@ -1067,8 +1249,7 @@ def test_unicode_allowed(): assert_success(doc, schema) -@mark.skipif(sys.version_info[0] < 3, - reason='requires python 3.x') +@mark.skipif(sys.version_info[0] < 3, reason='requires python 3.x') def test_unicode_allowed_py3(): """ All strings are unicode in Python 3.x. Input doc and schema have equal strings and validation yield success.""" @@ -1079,8 +1260,7 @@ def test_unicode_allowed_py3(): assert_success(doc, schema) -@mark.skipif(sys.version_info[0] > 2, - reason='requires python 2.x') +@mark.skipif(sys.version_info[0] > 2, reason='requires python 2.x') def test_unicode_allowed_py2(): """ Python 2.x encodes value of allowed using default encoding if the string includes characters outside ASCII range. 
Produced string @@ -1098,10 +1278,12 @@ def test_oneof(): # - greater than 0 # - equal to -5, 5, or 15 - schema = {'prop1': {'type': 'integer', 'oneof': [ - {'min': 0}, - {'min': 10}, - {'allowed': [-5, 5, 15]}]}} + schema = { + 'prop1': { + 'type': 'integer', + 'oneof': [{'min': 0}, {'min': 10}, {'allowed': [-5, 5, 15]}], + } + } # document is not valid # prop1 not greater than 0, 10 or equal to -5 @@ -1144,10 +1326,12 @@ def test_noneof(): # - greater than 0 # - equal to -5, 5, or 15 - schema = {'prop1': {'type': 'integer', 'noneof': [ - {'min': 0}, - {'min': 10}, - {'allowed': [-5, 5, 15]}]}} + schema = { + 'prop1': { + 'type': 'integer', + 'noneof': [{'min': 0}, {'min': 10}, {'allowed': [-5, 5, 15]}], + } + } # document is valid doc = {'prop1': -1} @@ -1179,11 +1363,14 @@ def test_noneof(): def test_anyof_allof(): # prop1 can be any number outside of [0-10] - schema = {'prop1': {'allof': [{'anyof': [{'type': 'float'}, - {'type': 'integer'}]}, - {'anyof': [{'min': 10}, - {'max': 0}]} - ]}} + schema = { + 'prop1': { + 'allof': [ + {'anyof': [{'type': 'float'}, {'type': 'integer'}]}, + {'anyof': [{'min': 10}, {'max': 0}]}, + ] + } + } doc = {'prop1': 11} assert_success(doc, schema) @@ -1206,15 +1393,19 @@ def test_anyof_allof(): def test_anyof_schema(validator): # test that a list of schemas can be specified. - valid_parts = [{'schema': {'model number': {'type': 'string'}, - 'count': {'type': 'integer'}}}, - {'schema': {'serial number': {'type': 'string'}, - 'count': {'type': 'integer'}}}] + valid_parts = [ + {'schema': {'model number': {'type': 'string'}, 'count': {'type': 'integer'}}}, + {'schema': {'serial number': {'type': 'string'}, 'count': {'type': 'integer'}}}, + ] valid_item = {'type': ['dict', 'string'], 'anyof': valid_parts} schema = {'parts': {'type': 'list', 'schema': valid_item}} - document = {'parts': [{'model number': 'MX-009', 'count': 100}, - {'serial number': '898-001'}, - 'misc']} + document = { + 'parts': [ + {'model number': 'MX-009', 'count': 100}, + {'serial number': '898-001'}, + 'misc', + ] + } # document is valid. each entry in 'parts' matches a type or schema assert_success(document, schema, validator=validator) @@ -1232,18 +1423,25 @@ def test_anyof_schema(validator): # and invalid. numbers are not allowed. 
exp_child_errors = [ - (('parts', 3), ('parts', 'schema', 'anyof'), errors.ANYOF, - valid_parts), - (('parts', 4), ('parts', 'schema', 'type'), errors.BAD_TYPE, - ['dict', 'string']) + (('parts', 3), ('parts', 'schema', 'anyof'), errors.ANYOF, valid_parts), + ( + ('parts', 4), + ('parts', 'schema', 'type'), + errors.BAD_TYPE, + ['dict', 'string'], + ), ] - _errors = assert_fail(document, schema, validator=validator, - error=('parts', ('parts', 'schema'), - errors.SEQUENCE_SCHEMA, valid_item), - child_errors=exp_child_errors) - assert_not_has_error(_errors, ('parts', 4), ('parts', 'schema', 'anyof'), - errors.ANYOF, valid_parts) + _errors = assert_fail( + document, + schema, + validator=validator, + error=('parts', ('parts', 'schema'), errors.SEQUENCE_SCHEMA, valid_item), + child_errors=exp_child_errors, + ) + assert_not_has_error( + _errors, ('parts', 4), ('parts', 'schema', 'anyof'), errors.ANYOF, valid_parts + ) # tests errors.BasicErrorHandler's tree representation v_errors = validator.errors @@ -1260,15 +1458,23 @@ def test_anyof_schema(validator): def test_anyof_2(): # these two schema should be the same - schema1 = {'prop': {'anyof': [{'type': 'dict', - 'schema': { - 'val': {'type': 'integer'}}}, - {'type': 'dict', - 'schema': { - 'val': {'type': 'string'}}}]}} - schema2 = {'prop': {'type': 'dict', 'anyof': [ - {'schema': {'val': {'type': 'integer'}}}, - {'schema': {'val': {'type': 'string'}}}]}} + schema1 = { + 'prop': { + 'anyof': [ + {'type': 'dict', 'schema': {'val': {'type': 'integer'}}}, + {'type': 'dict', 'schema': {'val': {'type': 'string'}}}, + ] + } + } + schema2 = { + 'prop': { + 'type': 'dict', + 'anyof': [ + {'schema': {'val': {'type': 'integer'}}}, + {'schema': {'val': {'type': 'string'}}}, + ], + } + } doc = {'prop': {'val': 0}} assert_success(doc, schema1) @@ -1290,47 +1496,69 @@ def test_anyof_type(): def test_oneof_schema(): - schema = {'oneof_schema': {'type': 'dict', - 'oneof_schema': - [{'digits': {'type': 'integer', - 'min': 0, 'max': 99}}, - {'text': {'type': 'string', - 'regex': '^[0-9]{2}$'}}]}} + schema = { + 'oneof_schema': { + 'type': 'dict', + 'oneof_schema': [ + {'digits': {'type': 'integer', 'min': 0, 'max': 99}}, + {'text': {'type': 'string', 'regex': '^[0-9]{2}$'}}, + ], + } + } assert_success({'oneof_schema': {'digits': 19}}, schema) assert_success({'oneof_schema': {'text': '84'}}, schema) assert_fail({'oneof_schema': {'digits': 19, 'text': '84'}}, schema) def test_nested_oneof_type(): - schema = {'nested_oneof_type': - {'valueschema': {'oneof_type': ['string', 'integer']}}} + schema = { + 'nested_oneof_type': {'valuesrules': {'oneof_type': ['string', 'integer']}} + } assert_success({'nested_oneof_type': {'foo': 'a'}}, schema) assert_success({'nested_oneof_type': {'bar': 3}}, schema) def test_nested_oneofs(validator): - validator.schema = {'abc': { - 'type': 'dict', - 'oneof_schema': [ - {'foo': { - 'type': 'dict', - 'schema': {'bar': {'oneof_type': ['integer', 'float']}} - }}, - {'baz': {'type': 'string'}} - ]}} + validator.schema = { + 'abc': { + 'type': 'dict', + 'oneof_schema': [ + { + 'foo': { + 'type': 'dict', + 'schema': {'bar': {'oneof_type': ['integer', 'float']}}, + } + }, + {'baz': {'type': 'string'}}, + ], + } + } document = {'abc': {'foo': {'bar': 'bad'}}} expected_errors = { 'abc': [ 'none or more than one rule validate', - {'oneof definition 0': [ - {'foo': [{'bar': [ - 'none or more than one rule validate', - {'oneof definition 0': ['must be of integer type'], - 'oneof definition 1': ['must be of float type']} - ]}]}], - 'oneof 
definition 1': [{'foo': ['unknown field']}]} + { + 'oneof definition 0': [ + { + 'foo': [ + { + 'bar': [ + 'none or more than one rule validate', + { + 'oneof definition 0': [ + 'must be of integer type' + ], + 'oneof definition 1': ['must be of float type'], + }, + ] + } + ] + } + ], + 'oneof definition 1': [{'foo': ['unknown field']}], + }, ] } @@ -1339,21 +1567,23 @@ def test_nested_oneofs(validator): def test_no_of_validation_if_type_fails(validator): - valid_parts = [{'schema': {'model number': {'type': 'string'}, - 'count': {'type': 'integer'}}}, - {'schema': {'serial number': {'type': 'string'}, - 'count': {'type': 'integer'}}}] - validator.schema = {'part': {'type': ['dict', 'string'], - 'anyof': valid_parts}} + valid_parts = [ + {'schema': {'model number': {'type': 'string'}, 'count': {'type': 'integer'}}}, + {'schema': {'serial number': {'type': 'string'}, 'count': {'type': 'integer'}}}, + ] + validator.schema = {'part': {'type': ['dict', 'string'], 'anyof': valid_parts}} document = {'part': 10} _errors = assert_fail(document, validator=validator) assert len(_errors) == 1 def test_issue_107(validator): - schema = {'info': {'type': 'dict', - 'schema': {'name': {'type': 'string', - 'required': True}}}} + schema = { + 'info': { + 'type': 'dict', + 'schema': {'name': {'type': 'string', 'required': True}}, + } + } document = {'info': {'name': 'my name'}} assert_success(document, schema, validator=validator) @@ -1369,20 +1599,23 @@ def test_dont_type_validate_nulled_values(validator): def test_dependencies_error(validator): - schema = {'field1': {'required': False}, - 'field2': {'required': True, - 'dependencies': {'field1': ['one', 'two']}}} + schema = { + 'field1': {'required': False}, + 'field2': {'required': True, 'dependencies': {'field1': ['one', 'two']}}, + } validator.validate({'field2': 7}, schema) - exp_msg = errors.BasicErrorHandler \ - .messages[errors.DEPENDENCIES_FIELD_VALUE.code] \ - .format(field='field2', constraint={'field1': ['one', 'two']}) + exp_msg = errors.BasicErrorHandler.messages[ + errors.DEPENDENCIES_FIELD_VALUE.code + ].format(field='field2', constraint={'field1': ['one', 'two']}) assert validator.errors == {'field2': [exp_msg]} def test_dependencies_on_boolean_field_with_one_value(): # https://github.com/pyeve/cerberus/issues/138 - schema = {'deleted': {'type': 'boolean'}, - 'text': {'dependencies': {'deleted': False}}} + schema = { + 'deleted': {'type': 'boolean'}, + 'text': {'dependencies': {'deleted': False}}, + } try: assert_success({'text': 'foo', 'deleted': False}, schema) assert_fail({'text': 'foo', 'deleted': True}, schema) @@ -1392,15 +1625,18 @@ def test_dependencies_on_boolean_field_with_one_value(): raise AssertionError( "Bug #138 still exists, couldn't use boolean in dependency " "without putting it in a list.\n" - "'some_field': True vs 'some_field: [True]") + "'some_field': True vs 'some_field: [True]" + ) else: raise def test_dependencies_on_boolean_field_with_value_in_list(): # https://github.com/pyeve/cerberus/issues/138 - schema = {'deleted': {'type': 'boolean'}, - 'text': {'dependencies': {'deleted': [False]}}} + schema = { + 'deleted': {'type': 'boolean'}, + 'text': {'dependencies': {'deleted': [False]}}, + } assert_success({'text': 'foo', 'deleted': False}, schema) assert_fail({'text': 'foo', 'deleted': True}, schema) @@ -1423,9 +1659,10 @@ def _validate_trail(self, constraint, field, value): def test_excludes(): - schema = {'this_field': {'type': 'dict', - 'excludes': 'that_field'}, - 'that_field': {'type': 'dict'}} + schema = { + 
'this_field': {'type': 'dict', 'excludes': 'that_field'}, + 'that_field': {'type': 'dict'}, + } assert_success({'this_field': {}}, schema) assert_success({'that_field': {}}, schema) assert_success({}, schema) @@ -1433,10 +1670,10 @@ def test_excludes(): def test_mutual_excludes(): - schema = {'this_field': {'type': 'dict', - 'excludes': 'that_field'}, - 'that_field': {'type': 'dict', - 'excludes': 'this_field'}} + schema = { + 'this_field': {'type': 'dict', 'excludes': 'that_field'}, + 'that_field': {'type': 'dict', 'excludes': 'this_field'}, + } assert_success({'this_field': {}}, schema) assert_success({'that_field': {}}, schema) assert_success({}, schema) @@ -1444,12 +1681,10 @@ def test_mutual_excludes(): def test_required_excludes(): - schema = {'this_field': {'type': 'dict', - 'excludes': 'that_field', - 'required': True}, - 'that_field': {'type': 'dict', - 'excludes': 'this_field', - 'required': True}} + schema = { + 'this_field': {'type': 'dict', 'excludes': 'that_field', 'required': True}, + 'that_field': {'type': 'dict', 'excludes': 'this_field', 'required': True}, + } assert_success({'this_field': {}}, schema, update=False) assert_success({'that_field': {}}, schema, update=False) assert_fail({}, schema) @@ -1457,11 +1692,11 @@ def test_required_excludes(): def test_multiples_exclusions(): - schema = {'this_field': {'type': 'dict', - 'excludes': ['that_field', 'bazo_field']}, - 'that_field': {'type': 'dict', - 'excludes': 'this_field'}, - 'bazo_field': {'type': 'dict'}} + schema = { + 'this_field': {'type': 'dict', 'excludes': ['that_field', 'bazo_field']}, + 'that_field': {'type': 'dict', 'excludes': 'this_field'}, + 'bazo_field': {'type': 'dict'}, + } assert_success({'this_field': {}}, schema) assert_success({'that_field': {}}, schema) assert_fail({'this_field': {}, 'that_field': {}}, schema) @@ -1471,21 +1706,28 @@ def test_multiples_exclusions(): def test_bad_excludes_fields(validator): - validator.schema = {'this_field': {'type': 'dict', - 'excludes': ['that_field', 'bazo_field'], - 'required': True}, - 'that_field': {'type': 'dict', - 'excludes': 'this_field', - 'required': True}} + validator.schema = { + 'this_field': { + 'type': 'dict', + 'excludes': ['that_field', 'bazo_field'], + 'required': True, + }, + 'that_field': {'type': 'dict', 'excludes': 'this_field', 'required': True}, + } assert_fail({'that_field': {}, 'this_field': {}}, validator=validator) handler = errors.BasicErrorHandler - assert (validator.errors == - {'that_field': - [handler.messages[errors.EXCLUDES_FIELD.code].format( - "'this_field'", field="that_field")], - 'this_field': - [handler.messages[errors.EXCLUDES_FIELD.code].format( - "'that_field', 'bazo_field'", field="this_field")]}) + assert validator.errors == { + 'that_field': [ + handler.messages[errors.EXCLUDES_FIELD.code].format( + "'this_field'", field="that_field" + ) + ], + 'this_field': [ + handler.messages[errors.EXCLUDES_FIELD.code].format( + "'that_field', 'bazo_field'", field="this_field" + ) + ], + } def test_boolean_is_not_a_number(): @@ -1511,17 +1753,29 @@ def test_forbidden(): assert_success({'user': 'alice'}, schema) +def test_forbidden_number(): + schema = {'amount': {'forbidden': (0, 0.0)}} + assert_fail({'amount': 0}, schema) + assert_fail({'amount': 0.0}, schema) + + def test_mapping_with_sequence_schema(): schema = {'list': {'schema': {'allowed': ['a', 'b', 'c']}}} document = {'list': {'is_a': 'mapping'}} - assert_fail(document, schema, - error=('list', ('list', 'schema'), errors.BAD_TYPE_FOR_SCHEMA, - schema['list']['schema'])) 
+ assert_fail( + document, + schema, + error=( + 'list', + ('list', 'schema'), + errors.BAD_TYPE_FOR_SCHEMA, + schema['list']['schema'], + ), + ) def test_sequence_with_mapping_schema(): - schema = {'list': {'schema': {'foo': {'allowed': ['a', 'b', 'c']}}, - 'type': 'dict'}} + schema = {'list': {'schema': {'foo': {'allowed': ['a', 'b', 'c']}}, 'type': 'dict'}} document = {'list': ['a', 'b', 'c']} assert_fail(document, schema) @@ -1529,19 +1783,24 @@ def test_sequence_with_mapping_schema(): def test_type_error_aborts_validation(): schema = {'foo': {'type': 'string', 'allowed': ['a']}} document = {'foo': 0} - assert_fail(document, schema, - error=('foo', ('foo', 'type'), errors.BAD_TYPE, 'string')) + assert_fail( + document, schema, error=('foo', ('foo', 'type'), errors.BAD_TYPE, 'string') + ) def test_dependencies_in_oneof(): # https://github.com/pyeve/cerberus/issues/241 - schema = {'a': {'type': 'integer', - 'oneof': [ - {'allowed': [1], 'dependencies': 'b'}, - {'allowed': [2], 'dependencies': 'c'} - ]}, - 'b': {}, - 'c': {}} + schema = { + 'a': { + 'type': 'integer', + 'oneof': [ + {'allowed': [1], 'dependencies': 'b'}, + {'allowed': [2], 'dependencies': 'c'}, + ], + }, + 'b': {}, + 'c': {}, + } assert_success({'a': 1, 'b': 'foo'}, schema) assert_success({'a': 2, 'c': 'bar'}, schema) assert_fail({'a': 1, 'c': 'foo'}, schema) @@ -1556,12 +1815,9 @@ def test_allow_unknown_with_oneof_rules(validator): { 'type': 'dict', 'allow_unknown': True, - 'schema': {'known': {'type': 'string'}} - }, - { - 'type': 'dict', - 'schema': {'known': {'type': 'string'}} + 'schema': {'known': {'type': 'string'}}, }, + {'type': 'dict', 'schema': {'known': {'type': 'string'}}}, ] } } @@ -1571,9 +1827,122 @@ def test_allow_unknown_with_oneof_rules(validator): validator(document, schema) _errors = validator._errors assert len(_errors) == 1 - assert_has_error(_errors, 'test', ('test', 'oneof'), - errors.ONEOF, schema['test']['oneof']) + assert_has_error( + _errors, 'test', ('test', 'oneof'), errors.ONEOF, schema['test']['oneof'] + ) assert len(_errors[0].child_errors) == 0 # check that allow_unknown is actually applied document = {'test': {'known': 's', 'unknown': 'asd'}} assert_success(document, validator=validator) + + +@mark.parametrize('constraint', (('Graham Chapman', 'Eric Idle'), 'Terry Gilliam')) +def test_contains(constraint): + validator = Validator({'actors': {'contains': constraint}}) + + document = {'actors': ('Graham Chapman', 'Eric Idle', 'Terry Gilliam')} + assert validator(document) + + document = {'actors': ('Eric idle', 'Terry Jones', 'John Cleese', 'Michael Palin')} + assert not validator(document) + assert errors.MISSING_MEMBERS in validator.document_error_tree['actors'] + missing_actors = validator.document_error_tree['actors'][ + errors.MISSING_MEMBERS + ].info[0] + assert any(x in missing_actors for x in ('Eric Idle', 'Terry Gilliam')) + + +def test_require_all_simple(): + schema = {'foo': {'type': 'string'}} + validator = Validator(require_all=True) + assert_fail( + {}, + schema, + validator, + error=('foo', '__require_all__', errors.REQUIRED_FIELD, True), + ) + assert_success({'foo': 'bar'}, schema, validator) + validator.require_all = False + assert_success({}, schema, validator) + assert_success({'foo': 'bar'}, schema, validator) + + +def test_require_all_override_by_required(): + schema = {'foo': {'type': 'string', 'required': False}} + validator = Validator(require_all=True) + assert_success({}, schema, validator) + assert_success({'foo': 'bar'}, schema, validator) + 
validator.require_all = False + assert_success({}, schema, validator) + assert_success({'foo': 'bar'}, schema, validator) + + schema = {'foo': {'type': 'string', 'required': True}} + validator.require_all = True + assert_fail( + {}, + schema, + validator, + error=('foo', ('foo', 'required'), errors.REQUIRED_FIELD, True), + ) + assert_success({'foo': 'bar'}, schema, validator) + validator.require_all = False + assert_fail( + {}, + schema, + validator, + error=('foo', ('foo', 'required'), errors.REQUIRED_FIELD, True), + ) + assert_success({'foo': 'bar'}, schema, validator) + + +@mark.parametrize( + "validator_require_all, sub_doc_require_all", + list(itertools.product([True, False], repeat=2)), +) +def test_require_all_override_by_subdoc_require_all( + validator_require_all, sub_doc_require_all +): + sub_schema = {"bar": {"type": "string"}} + schema = { + "foo": { + "type": "dict", + "require_all": sub_doc_require_all, + "schema": sub_schema, + } + } + validator = Validator(require_all=validator_require_all) + + assert_success({"foo": {"bar": "baz"}}, schema, validator) + if validator_require_all: + assert_fail({}, schema, validator) + else: + assert_success({}, schema, validator) + if sub_doc_require_all: + assert_fail({"foo": {}}, schema, validator) + else: + assert_success({"foo": {}}, schema, validator) + + +def test_require_all_and_exclude(): + schema = { + 'foo': {'type': 'string', 'excludes': 'bar'}, + 'bar': {'type': 'string', 'excludes': 'foo'}, + } + validator = Validator(require_all=True) + assert_fail( + {}, + schema, + validator, + errors=[ + ('foo', '__require_all__', errors.REQUIRED_FIELD, True), + ('bar', '__require_all__', errors.REQUIRED_FIELD, True), + ], + ) + assert_success({'foo': 'value'}, schema, validator) + assert_success({'bar': 'value'}, schema, validator) + assert_fail({'foo': 'value', 'bar': 'value'}, schema, validator) + validator.require_all = False + assert_success({}, schema, validator) + assert_success({'foo': 'value'}, schema, validator) + assert_success({'bar': 'value'}, schema, validator) + assert_fail({'foo': 'value', 'bar': 'value'}, schema, validator) diff --git a/pipenv/vendor/cerberus/utils.py b/pipenv/vendor/cerberus/utils.py index f10d39761b..5a015d64b3 100644 --- a/pipenv/vendor/cerberus/utils.py +++ b/pipenv/vendor/cerberus/utils.py @@ -1,12 +1,11 @@ from __future__ import absolute_import -from collections import Mapping, namedtuple, Sequence +from collections import namedtuple -from cerberus.platform import _int_types, _str_type +from cerberus.platform import _int_types, _str_type, Mapping, Sequence, Set -TypeDefinition = namedtuple('TypeDefinition', - 'name,included_types,excluded_types') +TypeDefinition = namedtuple('TypeDefinition', 'name,included_types,excluded_types') """ This class is used to define types that can be used as value in the :attr:`~cerberus.Validator.types_mapping` property. 
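The reflowed `TypeDefinition` above is the public hook for registering extra types on a validator's `types_mapping`. A minimal sketch of that pattern, assuming `TypeDefinition` is importable from the package root as in upstream Cerberus; `MyValidator` and the `decimal` type name are illustrative, not part of this patch:

    from decimal import Decimal

    from cerberus import TypeDefinition, Validator

    class MyValidator(Validator):
        # copy the class attribute so the base Validator stays untouched
        types_mapping = Validator.types_mapping.copy()
        types_mapping['decimal'] = TypeDefinition('decimal', (Decimal,), ())

    v = MyValidator({'price': {'type': 'decimal'}})
    assert v({'price': Decimal('1.99')})   # included type matches
    assert not v({'price': 1.99})          # a plain float is rejected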
@@ -19,19 +18,33 @@ def compare_paths_lt(x, y): - for i in range(min(len(x), len(y))): - if isinstance(x[i], type(y[i])): - if x[i] != y[i]: - return x[i] < y[i] - elif isinstance(x[i], _int_types): + min_length = min(len(x), len(y)) + + if x[:min_length] == y[:min_length]: + return len(x) == min_length + + for i in range(min_length): + a, b = x[i], y[i] + + for _type in (_int_types, _str_type, tuple): + if isinstance(a, _type): + if isinstance(b, _type): + break + else: + return True + + if a == b: + continue + elif a < b: return True - elif isinstance(y[i], _int_types): + else: return False - return len(x) < len(y) + + raise RuntimeError def drop_item_from_tuple(t, i): - return t[:i] + t[i + 1:] + return t[:i] + t[i + 1 :] def get_Validator_class(): @@ -50,26 +63,24 @@ def mapping_to_frozenset(mapping): equal. As it is used to identify equality of schemas, this can be considered okay as definitions are semantically equal regardless the container type. """ - mapping = mapping.copy() + + aggregation = {} + for key, value in mapping.items(): if isinstance(value, Mapping): - mapping[key] = mapping_to_frozenset(value) + aggregation[key] = mapping_to_frozenset(value) elif isinstance(value, Sequence): value = list(value) for i, item in enumerate(value): if isinstance(item, Mapping): value[i] = mapping_to_frozenset(item) - mapping[key] = tuple(value) - return frozenset(mapping.items()) + aggregation[key] = tuple(value) + elif isinstance(value, Set): + aggregation[key] = frozenset(value) + else: + aggregation[key] = value - -def isclass(obj): - try: - issubclass(obj, object) - except TypeError: - return False - else: - return True + return frozenset(aggregation.items()) def quote_string(value): diff --git a/pipenv/vendor/cerberus/validator.py b/pipenv/vendor/cerberus/validator.py index 27a2905323..ed1c153678 100644 --- a/pipenv/vendor/cerberus/validator.py +++ b/pipenv/vendor/cerberus/validator.py @@ -11,28 +11,41 @@ from __future__ import absolute_import from ast import literal_eval -from collections import Hashable, Iterable, Mapping, Sequence from copy import copy from datetime import date, datetime import re from warnings import warn from cerberus import errors -from cerberus.platform import _int_types, _str_type -from cerberus.schema import (schema_registry, rules_set_registry, - DefinitionSchema, SchemaError) -from cerberus.utils import (drop_item_from_tuple, isclass, - readonly_classproperty, TypeDefinition) - +from cerberus.platform import ( + _int_types, + _str_type, + Container, + Hashable, + Iterable, + Mapping, + Sequence, + Sized, +) +from cerberus.schema import ( + schema_registry, + rules_set_registry, + DefinitionSchema, + SchemaError, +) +from cerberus.utils import drop_item_from_tuple, readonly_classproperty, TypeDefinition toy_error_handler = errors.ToyErrorHandler() def dummy_for_rule_validation(rule_constraints): def dummy(self, constraint, field, value): - raise RuntimeError('Dummy method called. Its purpose is to hold just' - 'validation constraints for a rule in its ' - 'docstring.') + raise RuntimeError( + 'Dummy method called. Its purpose is to hold just' + 'validation constraints for a rule in its ' + 'docstring.' + ) + f = dummy f.__doc__ = rule_constraints return f @@ -40,12 +53,14 @@ def dummy(self, constraint, field, value): class DocumentError(Exception): """ Raised when the target document is missing or has the wrong format """ + pass class _SchemaRuleTypeError(Exception): """ Raised when a schema (list) validation encounters a mapping. 
Not supposed to be used outside this module. """ + pass @@ -76,9 +91,15 @@ class BareValidator(object): :param allow_unknown: See :attr:`~cerberus.Validator.allow_unknown`. Defaults to ``False``. :type allow_unknown: :class:`bool` or any :term:`mapping` + :param require_all: See :attr:`~cerberus.Validator.require_all`. + Defaults to ``False``. + :type require_all: :class:`bool` :param purge_unknown: See :attr:`~cerberus.Validator.purge_unknown`. Defaults to to ``False``. :type purge_unknown: :class:`bool` + :param purge_readonly: Removes all fields that are defined as ``readonly`` in the + normalization phase. + :type purge_readonly: :class:`bool` :param error_handler: The error handler that formats the result of :attr:`~cerberus.Validator.errors`. When given as two-value tuple with an error-handler @@ -98,28 +119,18 @@ class and a dictionary, the latter is passed to the """ Rules that will be processed in that order before any other. Type: :class:`tuple` """ types_mapping = { - 'binary': - TypeDefinition('binary', (bytes, bytearray), ()), - 'boolean': - TypeDefinition('boolean', (bool,), ()), - 'date': - TypeDefinition('date', (date,), ()), - 'datetime': - TypeDefinition('datetime', (datetime,), ()), - 'dict': - TypeDefinition('dict', (Mapping,), ()), - 'float': - TypeDefinition('float', (float, _int_types), ()), - 'integer': - TypeDefinition('integer', (_int_types,), ()), - 'list': - TypeDefinition('list', (Sequence,), (_str_type,)), - 'number': - TypeDefinition('number', (_int_types, float), (bool,)), - 'set': - TypeDefinition('set', (set,), ()), - 'string': - TypeDefinition('string', (_str_type), ()) + 'binary': TypeDefinition('binary', (bytes, bytearray), ()), + 'boolean': TypeDefinition('boolean', (bool,), ()), + 'container': TypeDefinition('container', (Container,), (_str_type,)), + 'date': TypeDefinition('date', (date,), ()), + 'datetime': TypeDefinition('datetime', (datetime,), ()), + 'dict': TypeDefinition('dict', (Mapping,), ()), + 'float': TypeDefinition('float', (float, _int_types), ()), + 'integer': TypeDefinition('integer', (_int_types,), ()), + 'list': TypeDefinition('list', (Sequence,), (_str_type,)), + 'number': TypeDefinition('number', (_int_types, float), (bool,)), + 'set': TypeDefinition('set', (set,), ()), + 'string': TypeDefinition('string', (_str_type), ()), } """ This mapping holds all available constraints for the type rule and their assigned :class:`~cerberus.TypeDefinition`. """ @@ -131,7 +142,8 @@ def __init__(self, *args, **kwargs): """ The arguments will be treated as with this signature: __init__(self, schema=None, ignore_none_values=False, - allow_unknown=False, purge_unknown=False, + allow_unknown=False, require_all=False, + purge_unknown=False, purge_readonly=False, error_handler=errors.BasicErrorHandler) """ @@ -168,6 +180,7 @@ def __init__(self, *args, **kwargs): self.__store_config(args, kwargs) self.schema = kwargs.get('schema', None) self.allow_unknown = kwargs.get('allow_unknown', False) + self.require_all = kwargs.get('require_all', False) self._remaining_rules = [] """ Keeps track of the rules that are next in line to be evaluated during the validation of a field. 
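The `require_all` flag stored above makes every schema-defined field behave as if it carried `'required': True`, while a per-field `required` setting still wins. A minimal sketch of the intended semantics, mirroring the `test_require_all_*` tests earlier in this patch (field names are illustrative):

    from cerberus import Validator

    schema = {
        'foo': {'type': 'string'},
        'bar': {'type': 'string', 'required': False},  # explicit override wins
    }
    v = Validator(schema, require_all=True)

    assert not v({})              # 'foo' is now implicitly required
    assert 'foo' in v.errors
    assert 'bar' not in v.errors  # 'bar' keeps its own 'required': False
    assert v({'foo': 'x'})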
@@ -182,8 +195,9 @@ def __init_error_handler(kwargs): error_handler, eh_config = error_handler else: eh_config = {} - if isclass(error_handler) and \ - issubclass(error_handler, errors.BaseErrorHandler): + if isinstance(error_handler, type) and issubclass( + error_handler, errors.BaseErrorHandler + ): return error_handler(**eh_config) elif isinstance(error_handler, errors.BaseErrorHandler): return error_handler @@ -192,12 +206,17 @@ def __init_error_handler(kwargs): def __store_config(self, args, kwargs): """ Assign args to kwargs and store configuration. """ - signature = ('schema', 'ignore_none_values', 'allow_unknown', - 'purge_unknown') - for i, p in enumerate(signature[:len(args)]): + signature = ( + 'schema', + 'ignore_none_values', + 'allow_unknown', + 'require_all', + 'purge_unknown', + 'purge_readonly', + ) + for i, p in enumerate(signature[: len(args)]): if p in kwargs: - raise TypeError("__init__ got multiple values for argument " - "'%s'" % p) + raise TypeError("__init__ got multiple values for argument " "'%s'" % p) else: kwargs[p] = args[i] self._config = kwargs @@ -251,8 +270,8 @@ def _error(self, *args): self._errors.extend(args[0]) self._errors.sort() for error in args[0]: - self.document_error_tree += error - self.schema_error_tree += error + self.document_error_tree.add(error) + self.schema_error_tree.add(error) self.error_handler.emit(error) elif len(args) == 2 and isinstance(args[1], _str_type): self._error(args[0], errors.CUSTOM, args[1]) @@ -262,7 +281,7 @@ def _error(self, *args): rule = args[1].rule info = args[2:] - document_path = self.document_path + (field, ) + document_path = self.document_path + (field,) schema_path = self.schema_path if code != errors.UNKNOWN_FIELD.code and rule is not None: @@ -274,6 +293,10 @@ def _error(self, *args): field_definitions = self._resolve_rules_set(self.schema[field]) if rule == 'nullable': constraint = field_definitions.get(rule, False) + elif rule == 'required': + constraint = field_definitions.get(rule, self.require_all) + if rule not in field_definitions: + schema_path = "__require_all__" else: constraint = field_definitions[rule] @@ -284,8 +307,7 @@ def _error(self, *args): ) self._error([self.recent_error]) - def _get_child_validator(self, document_crumb=None, schema_crumb=None, - **kwargs): + def _get_child_validator(self, document_crumb=None, schema_crumb=None, **kwargs): """ Creates a new instance of Validator-(sub-)class. All initial parameters of the parent are passed to the initialization, unless a parameter is given as an explicit *keyword*-parameter. 
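Two behavioral details sit in the `_error` hunk above: errors are now attached to the error trees via an explicit `add()` call instead of `+=`, and a `required` failure that stems only from `require_all` is recorded under the synthetic schema path `'__require_all__'`. Querying the trees is unchanged; a small sketch using the same indexing pattern as the vendored test suite (the `'amount'` field is illustrative):

    from cerberus import Validator, errors

    v = Validator({'amount': {'type': 'integer', 'min': 5}})
    v({'amount': 3})

    # an ErrorDefinition works both for membership tests and for lookups
    assert errors.MIN_VALUE in v.document_error_tree['amount']
    assert v.document_error_tree['amount'][errors.MIN_VALUE] is not None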
@@ -309,6 +331,7 @@ def _get_child_validator(self, document_crumb=None, schema_crumb=None, child_config['is_child'] = True child_config['error_handler'] = toy_error_handler child_config['root_allow_unknown'] = self.allow_unknown + child_config['root_require_all'] = self.require_all child_config['root_document'] = self.document child_config['root_schema'] = self.schema @@ -318,14 +341,14 @@ def _get_child_validator(self, document_crumb=None, schema_crumb=None, child_validator.document_path = self.document_path else: if not isinstance(document_crumb, tuple): - document_crumb = (document_crumb, ) + document_crumb = (document_crumb,) child_validator.document_path = self.document_path + document_crumb if schema_crumb is None: child_validator.schema_path = self.schema_path else: if not isinstance(schema_crumb, tuple): - schema_crumb = (schema_crumb, ) + schema_crumb = (schema_crumb,) child_validator.schema_path = self.schema_path + schema_crumb return child_validator @@ -334,8 +357,10 @@ def __get_rule_handler(self, domain, rule): methodname = '_{0}_{1}'.format(domain, rule.replace(' ', '_')) result = getattr(self, methodname, None) if result is None: - raise RuntimeError("There's no handler for '{}' in the '{}' " - "domain.".format(rule, domain)) + raise RuntimeError( + "There's no handler for '{}' in the '{}' " + "domain.".format(rule, domain) + ) return result def _drop_nodes_from_errorpaths(self, _errors, dp_items, sp_items): @@ -351,14 +376,15 @@ def _drop_nodes_from_errorpaths(self, _errors, dp_items, sp_items): sp_basedepth = len(self.schema_path) for error in _errors: for i in sorted(dp_items, reverse=True): - error.document_path = \ - drop_item_from_tuple(error.document_path, dp_basedepth + i) + error.document_path = drop_item_from_tuple( + error.document_path, dp_basedepth + i + ) for i in sorted(sp_items, reverse=True): - error.schema_path = \ - drop_item_from_tuple(error.schema_path, sp_basedepth + i) + error.schema_path = drop_item_from_tuple( + error.schema_path, sp_basedepth + i + ) if error.child_errors: - self._drop_nodes_from_errorpaths(error.child_errors, - dp_items, sp_items) + self._drop_nodes_from_errorpaths(error.child_errors, dp_items, sp_items) def _lookup_field(self, path): """ Searches for a field as defined by path. This method is used by the @@ -377,8 +403,7 @@ def _lookup_field(self, path): """ if path.startswith('^'): path = path[1:] - context = self.document if path.startswith('^') \ - else self.root_document + context = self.document if path.startswith('^') else self.root_document else: context = self.document @@ -386,7 +411,7 @@ def _lookup_field(self, path): for part in parts: if part not in context: return None, None - context = context.get(part) + context = context.get(part, {}) return parts[-1], context @@ -421,6 +446,17 @@ def allow_unknown(self, value): DefinitionSchema(self, {'allow_unknown': value}) self._config['allow_unknown'] = value + @property + def require_all(self): + """ If ``True`` known fields that are defined in the schema will + be required. 
+ Type: :class:`bool` """ + return self._config.get('require_all', False) + + @require_all.setter + def require_all(self, value): + self._config['require_all'] = value + @property def errors(self): """ The errors of the last processing formatted by the handler that is @@ -455,7 +491,7 @@ def _is_normalized(self, value): @property def purge_unknown(self): - """ If ``True`` unknown fields will be deleted from the document + """ If ``True``, unknown fields will be deleted from the document unless a validation is called with disabled normalization. Also see :ref:`purging-unknown-fields`. Type: :class:`bool` """ return self._config.get('purge_unknown', False) @@ -464,12 +500,29 @@ def purge_unknown(self): def purge_unknown(self, value): self._config['purge_unknown'] = value + @property + def purge_readonly(self): + """ If ``True``, fields declared as readonly will be deleted from the + document unless a validation is called with disabled normalization. + Type: :class:`bool` """ + return self._config.get('purge_readonly', False) + + @purge_readonly.setter + def purge_readonly(self, value): + self._config['purge_readonly'] = value + @property def root_allow_unknown(self): """ The :attr:`~cerberus.Validator.allow_unknown` attribute of the first level ancestor of a child validator. """ return self._config.get('root_allow_unknown', self.allow_unknown) + @property + def root_require_all(self): + """ The :attr:`~cerberus.Validator.require_all` attribute of + the first level ancestor of a child validator. """ + return self._config.get('root_require_all', self.require_all) + @property def root_document(self): """ The :attr:`~cerberus.Validator.document` attribute of the @@ -524,12 +577,12 @@ def schema_registry(self, registry): def types(cls): """ The constraints that can be used for the 'type' rule. Type: A tuple of strings. """ - redundant_types = \ - set(cls.types_mapping) & set(cls._types_from_methods) + redundant_types = set(cls.types_mapping) & set(cls._types_from_methods) if redundant_types: - warn("These types are defined both with a method and in the" - "'types_mapping' property of this validator: %s" - % redundant_types) + warn( + "These types are defined both with a method and in the" + "'types_mapping' property of this validator: %s" % redundant_types + ) return tuple(cls.types_mapping) + cls._types_from_methods @@ -554,8 +607,7 @@ def __init_processing(self, document, schema=None): if document is None: raise DocumentError(errors.DOCUMENT_MISSING) if not isinstance(document, Mapping): - raise DocumentError( - errors.DOCUMENT_FORMAT.format(document)) + raise DocumentError(errors.DOCUMENT_FORMAT.format(document)) self.error_handler.start(self) def _drop_remaining_rules(self, *rules): @@ -608,6 +660,8 @@ def __normalize_mapping(self, mapping, schema): self.__normalize_rename_fields(mapping, schema) if self.purge_unknown and not self.allow_unknown: self._normalize_purge_unknown(mapping, schema) + if self.purge_readonly: + self.__normalize_purge_readonly(mapping, schema) # Check `readonly` fields before applying default values because # a field's schema definition might contain both `readonly` and # `default`. 
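The `purge_readonly` option documented above complements `purge_unknown`: both silently drop fields during normalization instead of flagging them, and per the `__normalize_mapping` hunk, `purge_unknown` only applies while `allow_unknown` is off. A hedged sketch of the two options together (schema and document are illustrative):

    from cerberus import Validator

    schema = {
        'id': {'type': 'string', 'readonly': True},
        'name': {'type': 'string'},
    }
    v = Validator(schema, purge_readonly=True, purge_unknown=True)

    # 'id' is dropped as readonly, 'stray' as unknown; 'name' survives
    assert v.normalized({'id': '42', 'name': 'x', 'stray': 1}) == {'name': 'x'}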
@@ -631,13 +685,23 @@ def _normalize_coerce(self, mapping, schema): for field in mapping: if field in schema and 'coerce' in schema[field]: mapping[field] = self.__normalize_coerce( - schema[field]['coerce'], field, mapping[field], - schema[field].get('nullable', False), error) - elif isinstance(self.allow_unknown, Mapping) and \ - 'coerce' in self.allow_unknown: + schema[field]['coerce'], + field, + mapping[field], + schema[field].get('nullable', False), + error, + ) + elif ( + isinstance(self.allow_unknown, Mapping) + and 'coerce' in self.allow_unknown + ): mapping[field] = self.__normalize_coerce( - self.allow_unknown['coerce'], field, mapping[field], - self.allow_unknown.get('nullable', False), error) + self.allow_unknown['coerce'], + field, + mapping[field], + self.allow_unknown.get('nullable', False), + error, + ) def __normalize_coerce(self, processor, field, value, nullable, error): if isinstance(processor, _str_type): @@ -646,52 +710,60 @@ def __normalize_coerce(self, processor, field, value, nullable, error): elif isinstance(processor, Iterable): result = value for p in processor: - result = self.__normalize_coerce(p, field, result, - nullable, error) - if errors.COERCION_FAILED in \ - self.document_error_tree.fetch_errors_from( - self.document_path + (field,)): + result = self.__normalize_coerce(p, field, result, nullable, error) + if ( + errors.COERCION_FAILED + in self.document_error_tree.fetch_errors_from( + self.document_path + (field,) + ) + ): break return result try: return processor(value) except Exception as e: - if not nullable and e is not TypeError: + if not (nullable and value is None): self._error(field, error, str(e)) return value def __normalize_containers(self, mapping, schema): for field in mapping: - if field not in schema: - continue + rules = set(schema.get(field, ())) + # TODO: This check conflates validation and normalization if isinstance(mapping[field], Mapping): - if 'keyschema' in schema[field]: - self.__normalize_mapping_per_keyschema( - field, mapping, schema[field]['keyschema']) - if 'valueschema' in schema[field]: - self.__normalize_mapping_per_valueschema( - field, mapping, schema[field]['valueschema']) - if set(schema[field]) & set(('allow_unknown', 'purge_unknown', - 'schema')): + if 'keysrules' in rules: + self.__normalize_mapping_per_keysrules( + field, mapping, schema[field]['keysrules'] + ) + if 'valuesrules' in rules: + self.__normalize_mapping_per_valuesrules( + field, mapping, schema[field]['valuesrules'] + ) + if rules & set( + ('allow_unknown', 'purge_unknown', 'schema') + ) or isinstance(self.allow_unknown, Mapping): try: - self.__normalize_mapping_per_schema( - field, mapping, schema) + self.__normalize_mapping_per_schema(field, mapping, schema) except _SchemaRuleTypeError: pass + elif isinstance(mapping[field], _str_type): continue - elif isinstance(mapping[field], Sequence) and \ - 'schema' in schema[field]: - self.__normalize_sequence(field, mapping, schema) - def __normalize_mapping_per_keyschema(self, field, mapping, property_rules): + elif isinstance(mapping[field], Sequence): + if 'schema' in rules: + self.__normalize_sequence_per_schema(field, mapping, schema) + elif 'items' in rules: + self.__normalize_sequence_per_items(field, mapping, schema) + + def __normalize_mapping_per_keysrules(self, field, mapping, property_rules): schema = dict(((k, property_rules) for k in mapping[field])) document = dict(((k, k) for k in mapping[field])) validator = self._get_child_validator( - document_crumb=field, schema_crumb=(field, 
'keyschema'), - schema=schema) + document_crumb=field, schema_crumb=(field, 'keysrules'), schema=schema + ) result = validator.normalized(document, always_return_document=True) if validator._errors: self._drop_nodes_from_errorpaths(validator._errors, [], [2, 4]) @@ -700,46 +772,72 @@ def __normalize_mapping_per_keyschema(self, field, mapping, property_rules): if k == result[k]: continue if result[k] in mapping[field]: - warn("Normalizing keys of {path}: {key} already exists, " - "its value is replaced." - .format(path='.'.join(self.document_path + (field,)), - key=k)) + warn( + "Normalizing keys of {path}: {key} already exists, " + "its value is replaced.".format( + path='.'.join(str(x) for x in self.document_path + (field,)), + key=k, + ) + ) mapping[field][result[k]] = mapping[field][k] else: mapping[field][result[k]] = mapping[field][k] del mapping[field][k] - def __normalize_mapping_per_valueschema(self, field, mapping, value_rules): + def __normalize_mapping_per_valuesrules(self, field, mapping, value_rules): schema = dict(((k, value_rules) for k in mapping[field])) validator = self._get_child_validator( - document_crumb=field, schema_crumb=(field, 'valueschema'), - schema=schema) - mapping[field] = validator.normalized(mapping[field], - always_return_document=True) + document_crumb=field, schema_crumb=(field, 'valuesrules'), schema=schema + ) + mapping[field] = validator.normalized( + mapping[field], always_return_document=True + ) if validator._errors: self._drop_nodes_from_errorpaths(validator._errors, [], [2]) self._error(validator._errors) def __normalize_mapping_per_schema(self, field, mapping, schema): + rules = schema.get(field, {}) + if not rules and isinstance(self.allow_unknown, Mapping): + rules = self.allow_unknown validator = self._get_child_validator( - document_crumb=field, schema_crumb=(field, 'schema'), - schema=schema[field].get('schema', {}), - allow_unknown=schema[field].get('allow_unknown', self.allow_unknown), # noqa: E501 - purge_unknown=schema[field].get('purge_unknown', self.purge_unknown)) # noqa: E501 + document_crumb=field, + schema_crumb=(field, 'schema'), + schema=rules.get('schema', {}), + allow_unknown=rules.get('allow_unknown', self.allow_unknown), # noqa: E501 + purge_unknown=rules.get('purge_unknown', self.purge_unknown), + require_all=rules.get('require_all', self.require_all), + ) # noqa: E501 value_type = type(mapping[field]) - result_value = validator.normalized(mapping[field], - always_return_document=True) + result_value = validator.normalized(mapping[field], always_return_document=True) mapping[field] = value_type(result_value) if validator._errors: self._error(validator._errors) - def __normalize_sequence(self, field, mapping, schema): - schema = dict(((k, schema[field]['schema']) - for k in range(len(mapping[field])))) + def __normalize_sequence_per_schema(self, field, mapping, schema): + schema = dict( + ((k, schema[field]['schema']) for k in range(len(mapping[field]))) + ) document = dict((k, v) for k, v in enumerate(mapping[field])) validator = self._get_child_validator( - document_crumb=field, schema_crumb=(field, 'schema'), - schema=schema) + document_crumb=field, schema_crumb=(field, 'schema'), schema=schema + ) + value_type = type(mapping[field]) + result = validator.normalized(document, always_return_document=True) + mapping[field] = value_type(result.values()) + if validator._errors: + self._drop_nodes_from_errorpaths(validator._errors, [], [2]) + self._error(validator._errors) + + def __normalize_sequence_per_items(self, field, 
mapping, schema): + rules, values = schema[field]['items'], mapping[field] + if len(rules) != len(values): + return + schema = dict(((k, v) for k, v in enumerate(rules))) + document = dict((k, v) for k, v in enumerate(values)) + validator = self._get_child_validator( + document_crumb=field, schema_crumb=(field, 'items'), schema=schema + ) value_type = type(mapping[field]) result = validator.normalized(document, always_return_document=True) mapping[field] = value_type(result.values()) @@ -747,12 +845,17 @@ def __normalize_sequence(self, field, mapping, schema): self._drop_nodes_from_errorpaths(validator._errors, [], [2]) self._error(validator._errors) + @staticmethod + def __normalize_purge_readonly(mapping, schema): + for field in [x for x in mapping if schema.get(x, {}).get('readonly', False)]: + mapping.pop(field) + return mapping + @staticmethod def _normalize_purge_unknown(mapping, schema): """ {'type': 'boolean'} """ - for field in tuple(mapping): - if field not in schema: - del mapping[field] + for field in [x for x in mapping if x not in schema]: + mapping.pop(field) return mapping def __normalize_rename_fields(self, mapping, schema): @@ -760,10 +863,13 @@ def __normalize_rename_fields(self, mapping, schema): if field in schema: self._normalize_rename(mapping, schema, field) self._normalize_rename_handler(mapping, schema, field) - elif isinstance(self.allow_unknown, Mapping) and \ - 'rename_handler' in self.allow_unknown: + elif ( + isinstance(self.allow_unknown, Mapping) + and 'rename_handler' in self.allow_unknown + ): self._normalize_rename_handler( - mapping, {field: self.allow_unknown}, field) + mapping, {field: self.allow_unknown}, field + ) return mapping def _normalize_rename(self, mapping, schema, field): @@ -783,47 +889,62 @@ def _normalize_rename_handler(self, mapping, schema, field): if 'rename_handler' not in schema[field]: return new_name = self.__normalize_coerce( - schema[field]['rename_handler'], field, field, - False, errors.RENAMING_FAILED) + schema[field]['rename_handler'], field, field, False, errors.RENAMING_FAILED + ) if new_name != field: mapping[new_name] = mapping[field] del mapping[field] def __validate_readonly_fields(self, mapping, schema): - for field in (x for x in schema if x in mapping and - self._resolve_rules_set(schema[x]).get('readonly')): - self._validate_readonly(schema[field]['readonly'], field, - mapping[field]) + for field in ( + x + for x in schema + if x in mapping and self._resolve_rules_set(schema[x]).get('readonly') + ): + self._validate_readonly(schema[field]['readonly'], field, mapping[field]) def __normalize_default_fields(self, mapping, schema): - fields = [x for x in schema if x not in mapping or - mapping[x] is None and not schema[x].get('nullable', False)] + empty_fields = [ + x + for x in schema + if x not in mapping + or ( + mapping[x] is None # noqa: W503 + and not schema[x].get('nullable', False) + ) # noqa: W503 + ] + try: - fields_with_default = [x for x in fields if 'default' in schema[x]] + fields_with_default = [x for x in empty_fields if 'default' in schema[x]] except TypeError: raise _SchemaRuleTypeError for field in fields_with_default: self._normalize_default(mapping, schema, field) known_fields_states = set() - fields = [x for x in fields if 'default_setter' in schema[x]] - while fields: - field = fields.pop(0) + fields_with_default_setter = [ + x for x in empty_fields if 'default_setter' in schema[x] + ] + while fields_with_default_setter: + field = fields_with_default_setter.pop(0) try: 
self._normalize_default_setter(mapping, schema, field) except KeyError: - fields.append(field) + fields_with_default_setter.append(field) except Exception as e: self._error(field, errors.SETTING_DEFAULT_FAILED, str(e)) - fields_state = tuple(fields) - if fields_state in known_fields_states: - for field in fields: - self._error(field, errors.SETTING_DEFAULT_FAILED, - 'Circular dependencies of default setters.') + fields_processing_state = hash(tuple(fields_with_default_setter)) + if fields_processing_state in known_fields_states: + for field in fields_with_default_setter: + self._error( + field, + errors.SETTING_DEFAULT_FAILED, + 'Circular dependencies of default setters.', + ) break else: - known_fields_states.add(fields_state) + known_fields_states.add(fields_processing_state) def _normalize_default(self, mapping, schema, field): """ {'nullable': True} """ @@ -837,8 +958,7 @@ def _normalize_default_setter(self, mapping, schema, field): if 'default_setter' in schema[field]: setter = schema[field]['default_setter'] if isinstance(setter, _str_type): - setter = self.__get_rule_handler('normalize_default_setter', - setter) + setter = self.__get_rule_handler('normalize_default_setter', setter) mapping[field] = setter(mapping) # # Validating @@ -904,11 +1024,10 @@ def __validate_unknown_fields(self, field): if isinstance(self.allow_unknown, (Mapping, _str_type)): # validate that unknown fields matches the schema # for unknown_fields - schema_crumb = 'allow_unknown' if self.is_child \ - else '__allow_unknown__' + schema_crumb = 'allow_unknown' if self.is_child else '__allow_unknown__' validator = self._get_child_validator( - schema_crumb=schema_crumb, - schema={field: self.allow_unknown}) + schema_crumb=schema_crumb, schema={field: self.allow_unknown} + ) if not validator({field: value}, normalize=False): self._error(validator._errors) else: @@ -924,14 +1043,21 @@ def validate_rule(rule): definitions = self._resolve_rules_set(definitions) value = self.document[field] - rules_queue = [x for x in self.priority_validations - if x in definitions or x in self.mandatory_validations] - rules_queue.extend(x for x in self.mandatory_validations - if x not in rules_queue) - rules_queue.extend(x for x in definitions - if x not in rules_queue and - x not in self.normalization_rules and - x not in ('allow_unknown', 'required')) + rules_queue = [ + x + for x in self.priority_validations + if x in definitions or x in self.mandatory_validations + ] + rules_queue.extend( + x for x in self.mandatory_validations if x not in rules_queue + ) + rules_queue.extend( + x + for x in definitions + if x not in rules_queue + and x not in self.normalization_rules + and x not in ('allow_unknown', 'require_all', 'meta', 'required') + ) self._remaining_rules = rules_queue while self._remaining_rules: @@ -952,10 +1078,11 @@ def validate_rule(rule): _validate_allow_unknown = dummy_for_rule_validation( """ {'oneof': [{'type': 'boolean'}, {'type': ['dict', 'string'], - 'validator': 'bulk_schema'}]} """) + 'check_with': 'bulk_schema'}]} """ + ) def _validate_allowed(self, allowed_values, field, value): - """ {'type': 'list'} """ + """ {'type': 'container'} """ if isinstance(value, Iterable) and not isinstance(value, _str_type): unallowed = set(value) - set(allowed_values) if unallowed: @@ -964,10 +1091,54 @@ def _validate_allowed(self, allowed_values, field, value): if value not in allowed_values: self._error(field, errors.UNALLOWED_VALUE, value) + def _validate_check_with(self, checks, field, value): + """ {'oneof': [ + {'type': 
'callable'}, + {'type': 'list', + 'schema': {'oneof': [{'type': 'callable'}, + {'type': 'string'}]}}, + {'type': 'string'} + ]} """ + if isinstance(checks, _str_type): + try: + value_checker = self.__get_rule_handler('check_with', checks) + # TODO remove on next major release + except RuntimeError: + value_checker = self.__get_rule_handler('validator', checks) + warn( + "The 'validator' rule was renamed to 'check_with'. Please update " + "your schema and method names accordingly.", + DeprecationWarning, + ) + value_checker(field, value) + elif isinstance(checks, Iterable): + for v in checks: + self._validate_check_with(v, field, value) + else: + checks(field, value, self._error) + + def _validate_contains(self, expected_values, field, value): + """ {'empty': False } """ + if not isinstance(value, Iterable): + return + + if not isinstance(expected_values, Iterable) or isinstance( + expected_values, _str_type + ): + expected_values = set((expected_values,)) + else: + expected_values = set(expected_values) + + missing_values = expected_values - set(value) + if missing_values: + self._error(field, errors.MISSING_MEMBERS, missing_values) + def _validate_dependencies(self, dependencies, field, value): """ {'type': ('dict', 'hashable', 'list'), - 'validator': 'dependencies'} """ - if isinstance(dependencies, _str_type): + 'check_with': 'dependencies'} """ + if isinstance(dependencies, _str_type) or not isinstance( + dependencies, (Iterable, Mapping) + ): dependencies = (dependencies,) if isinstance(dependencies, Sequence): @@ -975,20 +1146,24 @@ def _validate_dependencies(self, dependencies, field, value): elif isinstance(dependencies, Mapping): self.__validate_dependencies_mapping(dependencies, field) - if self.document_error_tree.fetch_node_from( - self.schema_path + (field, 'dependencies')) is not None: + if ( + self.document_error_tree.fetch_node_from( + self.schema_path + (field, 'dependencies') + ) + is not None + ): return True def __validate_dependencies_mapping(self, dependencies, field): validated_dependencies_counter = 0 error_info = {} for dependency_name, dependency_values in dependencies.items(): - if (not isinstance(dependency_values, Sequence) or - isinstance(dependency_values, _str_type)): + if not isinstance(dependency_values, Sequence) or isinstance( + dependency_values, _str_type + ): dependency_values = [dependency_values] - wanted_field, wanted_field_value = \ - self._lookup_field(dependency_name) + wanted_field, wanted_field_value = self._lookup_field(dependency_name) if wanted_field_value in dependency_values: validated_dependencies_counter += 1 else: @@ -1004,59 +1179,71 @@ def __validate_dependencies_sequence(self, dependencies, field): def _validate_empty(self, empty, field, value): """ {'type': 'boolean'} """ - if isinstance(value, Iterable) and len(value) == 0: + if isinstance(value, Sized) and len(value) == 0: self._drop_remaining_rules( - 'allowed', 'forbidden', 'items', 'minlength', 'maxlength', - 'regex', 'validator') + 'allowed', + 'forbidden', + 'items', + 'minlength', + 'maxlength', + 'regex', + 'check_with', + ) if not empty: self._error(field, errors.EMPTY_NOT_ALLOWED) - def _validate_excludes(self, excludes, field, value): + def _validate_excludes(self, excluded_fields, field, value): """ {'type': ('hashable', 'list'), 'schema': {'type': 'hashable'}} """ - if isinstance(excludes, Hashable): - excludes = [excludes] + if isinstance(excluded_fields, Hashable): + excluded_fields = [excluded_fields] - # Save required field to be checked latter - if 'required' in 
self.schema[field] and self.schema[field]['required']: + # Mark the currently evaluated field as not required for now if it actually is. + # One of the so marked will be needed to pass when required fields are checked. + if self.schema[field].get('required', self.require_all): self._unrequired_by_excludes.add(field) - for exclude in excludes: - if (exclude in self.schema and - 'required' in self.schema[exclude] and - self.schema[exclude]['required']): - self._unrequired_by_excludes.add(exclude) + for excluded_field in excluded_fields: + if excluded_field in self.schema and self.schema[field].get( + 'required', self.require_all + ): + + self._unrequired_by_excludes.add(excluded_field) - if [True for key in excludes if key in self.document]: - # Wrap each field in `excludes` list between quotes - exclusion_str = ', '.join("'{0}'" - .format(word) for word in excludes) + if any(excluded_field in self.document for excluded_field in excluded_fields): + exclusion_str = ', '.join( + "'{0}'".format(field) for field in excluded_fields + ) self._error(field, errors.EXCLUDES_FIELD, exclusion_str) def _validate_forbidden(self, forbidden_values, field, value): """ {'type': 'list'} """ - if isinstance(value, _str_type): - if value in forbidden_values: - self._error(field, errors.FORBIDDEN_VALUE, value) - elif isinstance(value, Sequence): + if isinstance(value, Sequence) and not isinstance(value, _str_type): forbidden = set(value) & set(forbidden_values) if forbidden: self._error(field, errors.FORBIDDEN_VALUES, list(forbidden)) - elif isinstance(value, int): + else: if value in forbidden_values: self._error(field, errors.FORBIDDEN_VALUE, value) def _validate_items(self, items, field, values): - """ {'type': 'list', 'validator': 'items'} """ + """ {'type': 'list', 'check_with': 'items'} """ if len(items) != len(values): self._error(field, errors.ITEMS_LENGTH, len(items), len(values)) else: - schema = dict((i, definition) for i, definition in enumerate(items)) # noqa: E501 - validator = self._get_child_validator(document_crumb=field, - schema_crumb=(field, 'items'), # noqa: E501 - schema=schema) - if not validator(dict((i, value) for i, value in enumerate(values)), - update=self.update, normalize=False): + schema = dict( + (i, definition) for i, definition in enumerate(items) + ) # noqa: E501 + validator = self._get_child_validator( + document_crumb=field, + schema_crumb=(field, 'items'), # noqa: E501 + schema=schema, + ) + if not validator( + dict((i, value) for i, value in enumerate(values)), + update=self.update, + normalize=False, + ): self._error(field, errors.BAD_ITEMS, validator._errors) def __validate_logical(self, operator, definitions, field, value): @@ -1074,8 +1261,8 @@ def __validate_logical(self, operator, definitions, field, value): schema[field]['allow_unknown'] = self.allow_unknown validator = self._get_child_validator( - schema_crumb=(field, operator, i), - schema=schema, allow_unknown=True) + schema_crumb=(field, operator, i), schema=schema, allow_unknown=True + ) if validator(self.document, update=self.update, normalize=False): valid_counter += 1 else: @@ -1086,35 +1273,27 @@ def __validate_logical(self, operator, definitions, field, value): def _validate_anyof(self, definitions, field, value): """ {'type': 'list', 'logical': 'anyof'} """ - valids, _errors = \ - self.__validate_logical('anyof', definitions, field, value) + valids, _errors = self.__validate_logical('anyof', definitions, field, value) if valids < 1: - self._error(field, errors.ANYOF, _errors, - valids, len(definitions)) + 
self._error(field, errors.ANYOF, _errors, valids, len(definitions)) def _validate_allof(self, definitions, field, value): """ {'type': 'list', 'logical': 'allof'} """ - valids, _errors = \ - self.__validate_logical('allof', definitions, field, value) + valids, _errors = self.__validate_logical('allof', definitions, field, value) if valids < len(definitions): - self._error(field, errors.ALLOF, _errors, - valids, len(definitions)) + self._error(field, errors.ALLOF, _errors, valids, len(definitions)) def _validate_noneof(self, definitions, field, value): """ {'type': 'list', 'logical': 'noneof'} """ - valids, _errors = \ - self.__validate_logical('noneof', definitions, field, value) + valids, _errors = self.__validate_logical('noneof', definitions, field, value) if valids > 0: - self._error(field, errors.NONEOF, _errors, - valids, len(definitions)) + self._error(field, errors.NONEOF, _errors, valids, len(definitions)) def _validate_oneof(self, definitions, field, value): """ {'type': 'list', 'logical': 'oneof'} """ - valids, _errors = \ - self.__validate_logical('oneof', definitions, field, value) + valids, _errors = self.__validate_logical('oneof', definitions, field, value) if valids != 1: - self._error(field, errors.ONEOF, _errors, - valids, len(definitions)) + self._error(field, errors.ONEOF, _errors, valids, len(definitions)) def _validate_max(self, max_value, field, value): """ {'nullable': False } """ @@ -1137,6 +1316,8 @@ def _validate_maxlength(self, max_length, field, value): if isinstance(value, Iterable) and len(value) > max_length: self._error(field, errors.MAX_LENGTH, len(value)) + _validate_meta = dummy_for_rule_validation('') + def _validate_minlength(self, min_length, field, value): """ {'type': 'integer'} """ if isinstance(value, Iterable) and len(value) < min_length: @@ -1148,23 +1329,33 @@ def _validate_nullable(self, nullable, field, value): if not nullable: self._error(field, errors.NOT_NULLABLE) self._drop_remaining_rules( - 'empty', 'forbidden', 'items', 'keyschema', 'min', 'max', - 'minlength', 'maxlength', 'regex', 'schema', 'type', - 'valueschema') + 'allowed', + 'empty', + 'forbidden', + 'items', + 'keysrules', + 'min', + 'max', + 'minlength', + 'maxlength', + 'regex', + 'schema', + 'type', + 'valuesrules', + ) - def _validate_keyschema(self, schema, field, value): - """ {'type': ['dict', 'string'], 'validator': 'bulk_schema', + def _validate_keysrules(self, schema, field, value): + """ {'type': ['dict', 'string'], 'check_with': 'bulk_schema', 'forbidden': ['rename', 'rename_handler']} """ if isinstance(value, Mapping): validator = self._get_child_validator( document_crumb=field, - schema_crumb=(field, 'keyschema'), - schema=dict(((k, schema) for k in value.keys()))) - if not validator(dict(((k, k) for k in value.keys())), - normalize=False): - self._drop_nodes_from_errorpaths(validator._errors, - [], [2, 4]) - self._error(field, errors.KEYSCHEMA, validator._errors) + schema_crumb=(field, 'keysrules'), + schema=dict(((k, schema) for k in value.keys())), + ) + if not validator(dict(((k, k) for k in value.keys())), normalize=False): + self._drop_nodes_from_errorpaths(validator._errors, [], [2, 4]) + self._error(field, errors.KEYSRULES, validator._errors) def _validate_readonly(self, readonly, field, value): """ {'type': 'boolean'} """ @@ -1174,9 +1365,12 @@ def _validate_readonly(self, readonly, field, value): # If the document was normalized (and therefore already been # checked for readonly fields), we still have to return True # if an error was filed. 
- has_error = errors.READONLY_FIELD in \ - self.document_error_tree.fetch_errors_from( - self.document_path + (field,)) + has_error = ( + errors.READONLY_FIELD + in self.document_error_tree.fetch_errors_from( + self.document_path + (field,) + ) + ) if self._is_normalized and has_error: self._drop_remaining_rules() @@ -1192,41 +1386,47 @@ def _validate_regex(self, pattern, field, value): _validate_required = dummy_for_rule_validation(""" {'type': 'boolean'} """) + _validate_require_all = dummy_for_rule_validation(""" {'type': 'boolean'} """) + def __validate_required_fields(self, document): """ Validates that required fields are not missing. :param document: The document being validated. """ try: - required = set(field for field, definition in self.schema.items() - if self._resolve_rules_set(definition). - get('required') is True) + required = set( + field + for field, definition in self.schema.items() + if self._resolve_rules_set(definition).get('required', self.require_all) + is True + ) except AttributeError: if self.is_child and self.schema_path[-1] == 'schema': raise _SchemaRuleTypeError else: raise required -= self._unrequired_by_excludes - missing = required - set(field for field in document - if document.get(field) is not None or - not self.ignore_none_values) + missing = required - set( + field + for field in document + if document.get(field) is not None or not self.ignore_none_values + ) for field in missing: self._error(field, errors.REQUIRED_FIELD) - # At least on field from self._unrequired_by_excludes should be - # present in document + # At least one field from self._unrequired_by_excludes should be present in + # document. if self._unrequired_by_excludes: - fields = set(field for field in document - if document.get(field) is not None) + fields = set(field for field in document if document.get(field) is not None) if self._unrequired_by_excludes.isdisjoint(fields): for field in self._unrequired_by_excludes - fields: self._error(field, errors.REQUIRED_FIELD) def _validate_schema(self, schema, field, value): """ {'type': ['dict', 'string'], - 'anyof': [{'validator': 'schema'}, - {'validator': 'bulk_schema'}]} """ + 'anyof': [{'check_with': 'schema'}, + {'check_with': 'bulk_schema'}]} """ if schema is None: return @@ -1237,12 +1437,15 @@ def _validate_schema(self, schema, field, value): def __validate_schema_mapping(self, field, schema, value): schema = self._resolve_schema(schema) - allow_unknown = self.schema[field].get('allow_unknown', - self.allow_unknown) - validator = self._get_child_validator(document_crumb=field, - schema_crumb=(field, 'schema'), - schema=schema, - allow_unknown=allow_unknown) + allow_unknown = self.schema[field].get('allow_unknown', self.allow_unknown) + require_all = self.schema[field].get('require_all', self.require_all) + validator = self._get_child_validator( + document_crumb=field, + schema_crumb=(field, 'schema'), + schema=schema, + allow_unknown=allow_unknown, + require_all=require_all, + ) try: if not validator(value, update=self.update, normalize=False): self._error(field, errors.MAPPING_SCHEMA, validator._errors) @@ -1253,10 +1456,16 @@ def __validate_schema_mapping(self, field, schema, value): def __validate_schema_sequence(self, field, schema, value): schema = dict(((i, schema) for i in range(len(value)))) validator = self._get_child_validator( - document_crumb=field, schema_crumb=(field, 'schema'), - schema=schema, allow_unknown=self.allow_unknown) - validator(dict(((i, v) for i, v in enumerate(value))), - update=self.update, 
normalize=False) + document_crumb=field, + schema_crumb=(field, 'schema'), + schema=schema, + allow_unknown=self.allow_unknown, + ) + validator( + dict(((i, v) for i, v in enumerate(value))), + update=self.update, + normalize=False, + ) if validator._errors: self._drop_nodes_from_errorpaths(validator._errors, [], [2]) @@ -1264,7 +1473,7 @@ def __validate_schema_sequence(self, field, schema, value): def _validate_type(self, data_type, field, value): """ {'type': ['string', 'list'], - 'validator': 'type'} """ + 'check_with': 'type'} """ if not data_type: return @@ -1275,8 +1484,9 @@ def _validate_type(self, data_type, field, value): # this implementation still supports custom type validation methods type_definition = self.types_mapping.get(_type) if type_definition is not None: - matched = isinstance(value, type_definition.included_types) \ - and not isinstance(value, type_definition.excluded_types) + matched = isinstance( + value, type_definition.included_types + ) and not isinstance(value, type_definition.excluded_types) else: type_handler = self.__get_rule_handler('validate_type', _type) matched = type_handler(value) @@ -1293,43 +1503,28 @@ def _validate_type(self, data_type, field, value): self._error(field, errors.BAD_TYPE) self._drop_remaining_rules() - def _validate_validator(self, validator, field, value): - """ {'oneof': [ - {'type': 'callable'}, - {'type': 'list', - 'schema': {'oneof': [{'type': 'callable'}, - {'type': 'string'}]}}, - {'type': 'string'} - ]} """ - if isinstance(validator, _str_type): - validator = self.__get_rule_handler('validator', validator) - validator(field, value) - elif isinstance(validator, Iterable): - for v in validator: - self._validate_validator(v, field, value) - else: - validator(field, value, self._error) - - def _validate_valueschema(self, schema, field, value): - """ {'type': ['dict', 'string'], 'validator': 'bulk_schema', + def _validate_valuesrules(self, schema, field, value): + """ {'type': ['dict', 'string'], 'check_with': 'bulk_schema', 'forbidden': ['rename', 'rename_handler']} """ - schema_crumb = (field, 'valueschema') + schema_crumb = (field, 'valuesrules') if isinstance(value, Mapping): validator = self._get_child_validator( - document_crumb=field, schema_crumb=schema_crumb, - schema=dict((k, schema) for k in value)) + document_crumb=field, + schema_crumb=schema_crumb, + schema=dict((k, schema) for k in value), + ) validator(value, update=self.update, normalize=False) if validator._errors: self._drop_nodes_from_errorpaths(validator._errors, [], [2]) - self._error(field, errors.VALUESCHEMA, validator._errors) + self._error(field, errors.VALUESRULES, validator._errors) -RULE_SCHEMA_SEPARATOR = \ - "The rule's arguments are validated against this schema:" +RULE_SCHEMA_SEPARATOR = "The rule's arguments are validated against this schema:" class InspectedValidator(type): """ Metaclass for all validators """ + def __new__(cls, *args): if '__doc__' not in args[2]: args[2].update({'__doc__': args[1][0].__doc__}) @@ -1337,8 +1532,11 @@ def __new__(cls, *args): def __init__(cls, *args): def attributes_with_prefix(prefix): - return tuple(x.split('_', 2)[-1] for x in dir(cls) - if x.startswith('_' + prefix)) + return tuple( + x[len(prefix) + 2 :] + for x in dir(cls) + if x.startswith('_' + prefix + '_') + ) super(InspectedValidator, cls).__init__(*args) @@ -1346,20 +1544,27 @@ def attributes_with_prefix(prefix): for attribute in attributes_with_prefix('validate'): # TODO remove inspection of type test methods in next major release if 
attribute.startswith('type_'): - cls._types_from_methods += (attribute[len('type_'):],) + cls._types_from_methods += (attribute[len('type_') :],) else: - cls.validation_rules[attribute] = \ - cls.__get_rule_schema('_validate_' + attribute) + cls.validation_rules[attribute] = cls.__get_rule_schema( + '_validate_' + attribute + ) # TODO remove on next major release if cls._types_from_methods: - warn("Methods for type testing are deprecated, use TypeDefinition " - "and the 'types_mapping'-property of a Validator-instance " - "instead.", DeprecationWarning) + warn( + "Methods for type testing are deprecated, use TypeDefinition " + "and the 'types_mapping'-property of a Validator-instance " + "instead.", + DeprecationWarning, + ) - cls.validators = tuple(x for x in attributes_with_prefix('validator')) - x = cls.validation_rules['validator']['oneof'] - x[1]['schema']['oneof'][1]['allowed'] = x[2]['allowed'] = cls.validators + # TODO remove second summand on next major release + cls.checkers = tuple(x for x in attributes_with_prefix('check_with')) + tuple( + x for x in attributes_with_prefix('validator') + ) + x = cls.validation_rules['check_with']['oneof'] + x[1]['schema']['oneof'][1]['allowed'] = x[2]['allowed'] = cls.checkers for rule in (x for x in cls.mandatory_validations if x != 'nullable'): cls.validation_rules[rule]['required'] = True @@ -1367,19 +1572,20 @@ def attributes_with_prefix(prefix): cls.coercers, cls.default_setters, cls.normalization_rules = (), (), {} for attribute in attributes_with_prefix('normalize'): if attribute.startswith('coerce_'): - cls.coercers += (attribute[len('coerce_'):],) + cls.coercers += (attribute[len('coerce_') :],) elif attribute.startswith('default_setter_'): - cls.default_setters += (attribute[len('default_setter_'):],) + cls.default_setters += (attribute[len('default_setter_') :],) else: - cls.normalization_rules[attribute] = \ - cls.__get_rule_schema('_normalize_' + attribute) + cls.normalization_rules[attribute] = cls.__get_rule_schema( + '_normalize_' + attribute + ) for rule in ('coerce', 'rename_handler'): x = cls.normalization_rules[rule]['oneof'] - x[1]['schema']['oneof'][1]['allowed'] = \ - x[2]['allowed'] = cls.coercers - cls.normalization_rules['default_setter']['oneof'][1]['allowed'] = \ - cls.default_setters + x[1]['schema']['oneof'][1]['allowed'] = x[2]['allowed'] = cls.coercers + cls.normalization_rules['default_setter']['oneof'][1][ + 'allowed' + ] = cls.default_setters cls.rules = {} cls.rules.update(cls.validation_rules) @@ -1397,9 +1603,11 @@ def __get_rule_schema(cls, method_name): except Exception: result = {} - if not result: - warn("No validation schema is defined for the arguments of rule " - "'%s'" % method_name.split('_', 2)[-1]) + if not result and method_name != '_validate_meta': + warn( + "No validation schema is defined for the arguments of rule " + "'%s'" % method_name.split('_', 2)[-1] + ) return result diff --git a/pipenv/vendor/certifi/__init__.py b/pipenv/vendor/certifi/__init__.py index ef71f3af34..632db8e132 100644 --- a/pipenv/vendor/certifi/__init__.py +++ b/pipenv/vendor/certifi/__init__.py @@ -1,3 +1,3 @@ from .core import where -__version__ = "2018.11.29" +__version__ = "2019.03.09" diff --git a/pipenv/vendor/certifi/cacert.pem b/pipenv/vendor/certifi/cacert.pem index db68797e24..84636dde7d 100644 --- a/pipenv/vendor/certifi/cacert.pem +++ b/pipenv/vendor/certifi/cacert.pem @@ -4510,3 +4510,149 @@ Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh 
jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw 3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= -----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n 
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- diff --git a/pipenv/vendor/certifi/core.py b/pipenv/vendor/certifi/core.py index 2d02ea44c4..7271acf40e 100644 --- a/pipenv/vendor/certifi/core.py +++ b/pipenv/vendor/certifi/core.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # -*- coding: utf-8 -*- """ @@ -14,7 +13,3 @@ def where(): f = os.path.dirname(__file__) return os.path.join(f, 'cacert.pem') - - -if __name__ == '__main__': - print(where()) diff --git a/pipenv/vendor/click_completion/__init__.py b/pipenv/vendor/click_completion/__init__.py index e30cc0e362..620d79261e 100644 --- a/pipenv/vendor/click_completion/__init__.py +++ b/pipenv/vendor/click_completion/__init__.py @@ -19,7 +19,7 @@ from click_completion.lib import get_auto_shell from click_completion.patch import patch as _patch -__version__ = '0.5.0' +__version__ = '0.5.1' _initialized = False diff --git a/pipenv/vendor/click_completion/core.py b/pipenv/vendor/click_completion/core.py index dc47d471e2..36150d149e 100644 --- a/pipenv/vendor/click_completion/core.py +++ b/pipenv/vendor/click_completion/core.py @@ -131,8 +131,9 @@ def get_choices(cli, prog_name, args, incomplete): choices.append((opt, None)) if isinstance(ctx.command, MultiCommand): for name in ctx.command.list_commands(ctx): - if match(name, incomplete): - choices.append((name, ctx.command.get_command_short_help(ctx, name))) + command = ctx.command.get_command(ctx, name) + if match(name, incomplete) and not command.hidden: + choices.append((name, command.get_short_help_str())) for item, help in choices: yield (item, help) @@ -201,7 +202,7 @@ def do_fish_complete(cli, prog_name): for item, help in get_choices(cli, prog_name, args, incomplete): if help: - echo("%s\t%s" % 
(item, re.sub('\s', ' ', help))) + echo("%s\t%s" % (item, re.sub(r'\s', ' ', help))) else: echo(item) @@ -232,11 +233,11 @@ def do_zsh_complete(cli, prog_name): incomplete = '' def escape(s): - return s.replace('"', '""').replace("'", "''").replace('$', '\\$') + return s.replace('"', '""').replace("'", "''").replace('$', '\\$').replace('`', '\\`') res = [] for item, help in get_choices(cli, prog_name, args, incomplete): if help: - res.append('"%s"\:"%s"' % (escape(item), escape(help))) + res.append(r'"%s"\:"%s"' % (escape(item), escape(help))) else: res.append('"%s"' % escape(item)) if res: @@ -349,13 +350,8 @@ def install(shell=None, prog_name=None, env_name=None, path=None, append=None, e path = path or os.path.expanduser('~') + '/.bash_completion' mode = mode or 'a' elif shell == 'zsh': - ohmyzsh = os.path.expanduser('~') + '/.oh-my-zsh' - if os.path.exists(ohmyzsh): - path = path or ohmyzsh + '/completions/_%s' % prog_name - mode = mode or 'w' - else: - path = path or os.path.expanduser('~') + '/.zshrc' - mode = mode or 'a' + path = path or os.path.expanduser('~') + '/.zshrc' + mode = mode or 'a' elif shell == 'powershell': subprocess.check_call(['powershell', 'Set-ExecutionPolicy Unrestricted -Scope CurrentUser']) path = path or subprocess.check_output(['powershell', '-NoProfile', 'echo $profile']).strip() if install else '' diff --git a/pipenv/vendor/click_completion/zsh.j2 b/pipenv/vendor/click_completion/zsh.j2 index 9e1024a81c..ac796615eb 100644 --- a/pipenv/vendor/click_completion/zsh.j2 +++ b/pipenv/vendor/click_completion/zsh.j2 @@ -3,6 +3,5 @@ _{{prog_name}}() { eval $(env COMMANDLINE="${words[1,$CURRENT]}" {{complete_var}}=complete-zsh {% for k, v in extra_env.items() %} {{k}}={{v}}{% endfor %} {{prog_name}}) } if [[ "$(basename -- ${(%):-%x})" != "_{{prog_name}}" ]]; then - autoload -U compinit && compinit compdef _{{prog_name}} {{prog_name}} fi diff --git a/pipenv/vendor/distlib/__init__.py b/pipenv/vendor/distlib/__init__.py index a786b4d3b7..08fe1fc48f 100644 --- a/pipenv/vendor/distlib/__init__.py +++ b/pipenv/vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.2.8' +__version__ = '0.2.9' class DistlibException(Exception): pass diff --git a/pipenv/vendor/distlib/index.py b/pipenv/vendor/distlib/index.py index 2406be2169..7a87cdcf7a 100644 --- a/pipenv/vendor/distlib/index.py +++ b/pipenv/vendor/distlib/index.py @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_INDEX = 'https://pypi.org/pypi' DEFAULT_REALM = 'pypi' class PackageIndex(object): diff --git a/pipenv/vendor/distlib/locators.py b/pipenv/vendor/distlib/locators.py index 5c655c3e51..a7ed9469d8 100644 --- a/pipenv/vendor/distlib/locators.py +++ b/pipenv/vendor/distlib/locators.py @@ -36,7 +36,7 @@ HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') -DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_INDEX = 'https://pypi.org/pypi' def get_all_distribution_names(url=None): """ @@ -197,7 +197,7 @@ def score_url(self, url): is_downloadable = basename.endswith(self.downloadable_extensions) if is_wheel: compatible = is_compatible(Wheel(basename), self.wheel_tags) - return (t.scheme == 'https', 'pypi.python.org' in t.netloc, + return (t.scheme == 'https', 'pypi.org' in t.netloc, is_downloadable, is_wheel, compatible, basename) def prefer_url(self, url1, url2): @@ -1049,7 +1049,7 @@ def 
get_distribution_names(self): # versions which don't conform to PEP 426 / PEP 440. default_locator = AggregatingLocator( JSONLocator(), - SimpleScrapingLocator('https://pypi.python.org/simple/', + SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0), scheme='legacy') diff --git a/pipenv/vendor/distlib/metadata.py b/pipenv/vendor/distlib/metadata.py index 77eed7f968..2d61378e99 100644 --- a/pipenv/vendor/distlib/metadata.py +++ b/pipenv/vendor/distlib/metadata.py @@ -91,9 +91,11 @@ class MetadataInvalidError(DistlibException): _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') -# See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. Include -# it in the tuple literal below to allow it (for now) -_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires') +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides') _566_MARKERS = ('Description-Content-Type',) diff --git a/pipenv/vendor/distlib/scripts.py b/pipenv/vendor/distlib/scripts.py index 8e22cb9163..5965e241d6 100644 --- a/pipenv/vendor/distlib/scripts.py +++ b/pipenv/vendor/distlib/scripts.py @@ -39,27 +39,12 @@ # check if Python is called on the first line with this expression FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s if __name__ == '__main__': - import sys, re - - def _resolve(module, func): - __import__(module) - mod = sys.modules[module] - parts = func.split('.') - result = getattr(mod, parts.pop(0)) - for p in parts: - result = getattr(result, p) - return result - - try: - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - - func = _resolve('%(module)s', '%(func)s') - rc = func() # None interpreted as 0 - except Exception as e: # only supporting Python >= 2.6 - sys.stderr.write('%%s\n' %% e) - rc = 1 - sys.exit(rc) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) ''' @@ -225,6 +210,7 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): def _get_script_text(self, entry): return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], func=entry.suffix) manifest = _DEFAULT_MANIFEST diff --git a/pipenv/vendor/distlib/util.py b/pipenv/vendor/distlib/util.py index 9d4bfd3bec..e851146c0f 100644 --- a/pipenv/vendor/distlib/util.py +++ b/pipenv/vendor/distlib/util.py @@ -804,11 +804,15 @@ def ensure_slash(s): def parse_credentials(netloc): username = password = None if '@' in netloc: - prefix, netloc = netloc.split('@', 1) + prefix, netloc = netloc.rsplit('@', 1) if ':' not in prefix: username = prefix else: username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) return username, password, netloc diff --git a/pipenv/vendor/distlib/wheel.py b/pipenv/vendor/distlib/wheel.py index b04bfaefe9..0c8efad9ae 100644 --- a/pipenv/vendor/distlib/wheel.py +++ b/pipenv/vendor/distlib/wheel.py @@ -433,6 +433,22 @@ def build(self, paths, tags=None, wheel_version=None): self.build_zip(pathname, archive_paths) return pathname + def skip_entry(self, arcname): + """ + Determine whether an archive entry should be skipped when verifying + or installing. 
+ """ + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + # We also skip directories, as they won't be in RECORD + # either. See: + # + # https://github.com/pypa/wheel/issues/294 + # https://github.com/pypa/wheel/issues/287 + # https://github.com/pypa/wheel/pull/289 + # + return arcname.endswith(('/', '/RECORD.jws')) + def install(self, paths, maker, **kwargs): """ Install a wheel to the specified paths. If kwarg ``warner`` is @@ -514,9 +530,7 @@ def install(self, paths, maker, **kwargs): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - if u_arcname.endswith('/RECORD.jws'): + if self.skip_entry(u_arcname): continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: @@ -786,13 +800,15 @@ def verify(self): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') - if '..' in u_arcname: + # See issue #115: some wheels have .. in their entries, but + # in the filename ... e.g. __main__..py ! So the check is + # updated to look for .. in the directory portions + p = u_arcname.split('/') + if '..' in p: raise DistlibException('invalid entry in ' 'wheel: %r' % u_arcname) - # The signature file won't be in RECORD, - # and we don't currently don't do anything with it - if u_arcname.endswith('/RECORD.jws'): + if self.skip_entry(u_arcname): continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: diff --git a/pipenv/vendor/dotenv/__init__.py b/pipenv/vendor/dotenv/__init__.py index 50f27cd4b7..1867868f71 100644 --- a/pipenv/vendor/dotenv/__init__.py +++ b/pipenv/vendor/dotenv/__init__.py @@ -1,12 +1,15 @@ +from typing import Any, Optional from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values def load_ipython_extension(ipython): + # type: (Any) -> None from .ipython import load_ipython_extension load_ipython_extension(ipython) def get_cli_string(path=None, action=None, key=None, value=None, quote=None): + # type: (Optional[str], Optional[str], Optional[str], Optional[str], Optional[str]) -> str """Returns a string suitable for running as a shell script. 
Useful for converting a arguments passed to a fabric task diff --git a/pipenv/vendor/dotenv/cli.py b/pipenv/vendor/dotenv/cli.py index 4e03c12a5b..45f4b765e7 100644 --- a/pipenv/vendor/dotenv/cli.py +++ b/pipenv/vendor/dotenv/cli.py @@ -1,5 +1,6 @@ import os import sys +from typing import Any, List try: import click @@ -22,6 +23,7 @@ @click.version_option(version=__version__) @click.pass_context def cli(ctx, file, quote): + # type: (click.Context, Any, Any) -> None '''This script is used to set, get or unset values from a .env file.''' ctx.obj = {} ctx.obj['FILE'] = file @@ -31,6 +33,7 @@ def cli(ctx, file, quote): @cli.command() @click.pass_context def list(ctx): + # type: (click.Context) -> None '''Display all the stored key/value.''' file = ctx.obj['FILE'] dotenv_as_dict = dotenv_values(file) @@ -43,6 +46,7 @@ def list(ctx): @click.argument('key', required=True) @click.argument('value', required=True) def set(ctx, key, value): + # type: (click.Context, Any, Any) -> None '''Store the given key/value.''' file = ctx.obj['FILE'] quote = ctx.obj['QUOTE'] @@ -57,6 +61,7 @@ def set(ctx, key, value): @click.pass_context @click.argument('key', required=True) def get(ctx, key): + # type: (click.Context, Any) -> None '''Retrieve the value for the given key.''' file = ctx.obj['FILE'] stored_value = get_key(file, key) @@ -70,6 +75,7 @@ def get(ctx, key): @click.pass_context @click.argument('key', required=True) def unset(ctx, key): + # type: (click.Context, Any) -> None '''Removes the given key.''' file = ctx.obj['FILE'] quote = ctx.obj['QUOTE'] @@ -84,13 +90,14 @@ def unset(ctx, key): @click.pass_context @click.argument('commandline', nargs=-1, type=click.UNPROCESSED) def run(ctx, commandline): + # type: (click.Context, List[str]) -> None """Run command with environment variables present.""" file = ctx.obj['FILE'] dotenv_as_dict = dotenv_values(file) if not commandline: click.echo('No command given.') exit(1) - ret = run_command(commandline, dotenv_as_dict) + ret = run_command(commandline, dotenv_as_dict) # type: ignore exit(ret) diff --git a/pipenv/vendor/dotenv/compat.py b/pipenv/vendor/dotenv/compat.py index f6baa3617b..99ffb39b36 100644 --- a/pipenv/vendor/dotenv/compat.py +++ b/pipenv/vendor/dotenv/compat.py @@ -1,9 +1,8 @@ import sys -try: - from StringIO import StringIO # noqa -except ImportError: + +if sys.version_info >= (3, 0): from io import StringIO # noqa +else: + from StringIO import StringIO # noqa -PY2 = sys.version_info[0] == 2 -WIN = sys.platform.startswith('win') -text_type = unicode if PY2 else str # noqa +PY2 = sys.version_info[0] == 2 # type: bool diff --git a/pipenv/vendor/dotenv/environ.py b/pipenv/vendor/dotenv/environ.py deleted file mode 100644 index ad3571656f..0000000000 --- a/pipenv/vendor/dotenv/environ.py +++ /dev/null @@ -1,54 +0,0 @@ -import os - - -class UndefinedValueError(Exception): - pass - - -class Undefined(object): - """Class to represent undefined type. """ - pass - - -# Reference instance to represent undefined values -undefined = Undefined() - - -def _cast_boolean(value): - """ - Helper to convert config values to boolean as ConfigParser do. - """ - _BOOLEANS = {'1': True, 'yes': True, 'true': True, 'on': True, - '0': False, 'no': False, 'false': False, 'off': False, '': False} - value = str(value) - if value.lower() not in _BOOLEANS: - raise ValueError('Not a boolean: %s' % value) - - return _BOOLEANS[value.lower()] - - -def getenv(option, default=undefined, cast=undefined): - """ - Return the value for option or default if defined. 
- """ - - # We can't avoid __contains__ because value may be empty. - if option in os.environ: - value = os.environ[option] - else: - if isinstance(default, Undefined): - raise UndefinedValueError('{} not found. Declare it as envvar or define a default value.'.format(option)) - - value = default - - if isinstance(cast, Undefined): - return value - - if cast is bool: - value = _cast_boolean(value) - elif cast is list: - value = [x for x in value.split(',') if x] - else: - value = cast(value) - - return value diff --git a/pipenv/vendor/dotenv/ipython.py b/pipenv/vendor/dotenv/ipython.py index 06252f1e81..7f1b13d6c4 100644 --- a/pipenv/vendor/dotenv/ipython.py +++ b/pipenv/vendor/dotenv/ipython.py @@ -1,8 +1,8 @@ from __future__ import print_function -from IPython.core.magic import Magics, line_magic, magics_class -from IPython.core.magic_arguments import (argument, magic_arguments, - parse_argstring) +from IPython.core.magic import Magics, line_magic, magics_class # type: ignore +from IPython.core.magic_arguments import (argument, magic_arguments, # type: ignore + parse_argstring) # type: ignore from .main import find_dotenv, load_dotenv diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py index 98b22ec0d6..08122825eb 100644 --- a/pipenv/vendor/dotenv/main.py +++ b/pipenv/vendor/dotenv/main.py @@ -9,13 +9,26 @@ import sys from subprocess import Popen import tempfile +from typing import (Any, Dict, Iterator, List, Match, NamedTuple, Optional, # noqa + Pattern, Union, TYPE_CHECKING, Text, IO, Tuple) # noqa import warnings -from collections import OrderedDict, namedtuple +from collections import OrderedDict from contextlib import contextmanager -from .compat import StringIO, PY2, WIN, text_type +from .compat import StringIO, PY2 -__posix_variable = re.compile(r'\$\{[^\}]*\}') +if TYPE_CHECKING: # pragma: no cover + if sys.version_info >= (3, 6): + _PathLike = os.PathLike + else: + _PathLike = Text + + if sys.version_info >= (3, 0): + _StringIO = StringIO + else: + _StringIO = StringIO[Text] + +__posix_variable = re.compile(r'\$\{[^\}]*\}') # type: Pattern[Text] _binding = re.compile( r""" @@ -42,22 +55,27 @@ ) """.format(r'[^\S\r\n]'), re.MULTILINE | re.VERBOSE, -) +) # type: Pattern[Text] -_escape_sequence = re.compile(r"\\[\\'\"abfnrtv]") +_escape_sequence = re.compile(r"\\[\\'\"abfnrtv]") # type: Pattern[Text] -Binding = namedtuple('Binding', 'key value original') +Binding = NamedTuple("Binding", [("key", Optional[Text]), + ("value", Optional[Text]), + ("original", Text)]) def decode_escapes(string): + # type: (Text) -> Text def decode_match(match): - return codecs.decode(match.group(0), 'unicode-escape') + # type: (Match[Text]) -> Text + return codecs.decode(match.group(0), 'unicode-escape') # type: ignore return _escape_sequence.sub(decode_match, string) def is_surrounded_by(string, char): + # type: (Text, Text) -> bool return ( len(string) > 1 and string[0] == string[-1] == char @@ -65,7 +83,9 @@ def is_surrounded_by(string, char): def parse_binding(string, position): + # type: (Text, int) -> Tuple[Binding, int] match = _binding.match(string, position) + assert match is not None (matched, key, value) = match.groups() if key is None or value is None: key = None @@ -80,6 +100,7 @@ def parse_binding(string, position): def parse_stream(stream): + # type:(IO[Text]) -> Iterator[Binding] string = stream.read() position = 0 length = len(string) @@ -88,26 +109,41 @@ def parse_stream(stream): yield binding +def to_env(text): + # type: (Text) -> str + """ + Encode a string the 
same way whether it comes from the environment or a `.env` file. + """ + if PY2: + return text.encode(sys.getfilesystemencoding() or "utf-8") + else: + return text + + class DotEnv(): - def __init__(self, dotenv_path, verbose=False): - self.dotenv_path = dotenv_path - self._dict = None - self.verbose = verbose + def __init__(self, dotenv_path, verbose=False, encoding=None): + # type: (Union[Text, _PathLike, _StringIO], bool, Union[None, Text]) -> None + self.dotenv_path = dotenv_path # type: Union[Text,_PathLike, _StringIO] + self._dict = None # type: Optional[Dict[Text, Text]] + self.verbose = verbose # type: bool + self.encoding = encoding # type: Union[None, Text] @contextmanager def _get_stream(self): + # type: () -> Iterator[IO[Text]] if isinstance(self.dotenv_path, StringIO): yield self.dotenv_path elif os.path.isfile(self.dotenv_path): - with io.open(self.dotenv_path) as stream: + with io.open(self.dotenv_path, encoding=self.encoding) as stream: yield stream else: if self.verbose: - warnings.warn("File doesn't exist {}".format(self.dotenv_path)) + warnings.warn("File doesn't exist {}".format(self.dotenv_path)) # type: ignore yield StringIO('') def dict(self): + # type: () -> Dict[Text, Text] """Return dotenv as dict""" if self._dict: return self._dict @@ -117,29 +153,26 @@ def dict(self): return self._dict def parse(self): + # type: () -> Iterator[Tuple[Text, Text]] with self._get_stream() as stream: for mapping in parse_stream(stream): if mapping.key is not None and mapping.value is not None: yield mapping.key, mapping.value def set_as_environment_variables(self, override=False): + # type: (bool) -> bool """ Load the current dotenv as system environemt variable. """ for k, v in self.dict().items(): if k in os.environ and not override: continue - # With Python2 on Windows, force environment variables to str to avoid - # "TypeError: environment can only contain strings" in Python's subprocess.py. - if PY2 and WIN: - if isinstance(k, text_type) or isinstance(v, text_type): - k = k.encode('ascii') - v = v.encode('ascii') - os.environ[k] = v + os.environ[to_env(k)] = to_env(v) return True def get(self, key): + # type: (Text) -> Optional[Text] """ """ data = self.dict() @@ -148,10 +181,13 @@ def get(self, key): return data[key] if self.verbose: - warnings.warn("key %s not found in %s." % (key, self.dotenv_path)) + warnings.warn("key %s not found in %s." % (key, self.dotenv_path)) # type: ignore + + return None def get_key(dotenv_path, key_to_get): + # type: (Union[Text, _PathLike], Text) -> Optional[Text] """ Gets the value of a given key from the given .env @@ -162,10 +198,11 @@ def get_key(dotenv_path, key_to_get): @contextmanager def rewrite(path): + # type: (_PathLike) -> Iterator[Tuple[IO[Text], IO[Text]]] try: with tempfile.NamedTemporaryFile(mode="w+", delete=False) as dest: with io.open(path) as source: - yield (source, dest) + yield (source, dest) # type: ignore except BaseException: if os.path.isfile(dest.name): os.unlink(dest.name) @@ -175,6 +212,7 @@ def rewrite(path): def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"): + # type: (_PathLike, Text, Text, Text) -> Tuple[Optional[bool], Text, Text] """ Adds or Updates a key/value to the given .env @@ -183,7 +221,7 @@ def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"): """ value_to_set = value_to_set.strip("'").strip('"') if not os.path.exists(dotenv_path): - warnings.warn("can't write to %s - it doesn't exist." % dotenv_path) + warnings.warn("can't write to %s - it doesn't exist." 
% dotenv_path) # type: ignore return None, key_to_set, value_to_set if " " in value_to_set: @@ -207,6 +245,7 @@ def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"): def unset_key(dotenv_path, key_to_unset, quote_mode="always"): + # type: (_PathLike, Text, Text) -> Tuple[Optional[bool], Text] """ Removes a given key from the given .env @@ -214,7 +253,7 @@ def unset_key(dotenv_path, key_to_unset, quote_mode="always"): If the given key doesn't exist in the .env, fails """ if not os.path.exists(dotenv_path): - warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path) + warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path) # type: ignore return None, key_to_unset removed = False @@ -226,14 +265,16 @@ def unset_key(dotenv_path, key_to_unset, quote_mode="always"): dest.write(mapping.original) if not removed: - warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path)) + warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path)) # type: ignore return None, key_to_unset return removed, key_to_unset def resolve_nested_variables(values): + # type: (Dict[Text, Text]) -> Dict[Text, Text] def _replacement(name): + # type: (Text) -> Text """ get appropriate value for a variable name. first search in environ, if not found, @@ -243,6 +284,7 @@ def _replacement(name): return ret def _re_sub_callback(match_object): + # type: (Match[Text]) -> Text """ From a match object gets the variable name and returns the correct replacement @@ -258,6 +300,7 @@ def _re_sub_callback(match_object): def _walk_to_root(path): + # type: (Text) -> Iterator[Text] """ Yield directories starting from the given directory up to the root """ @@ -276,6 +319,7 @@ def _walk_to_root(path): def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False): + # type: (Text, bool, bool) -> Text """ Search in increasingly higher folders for the given file @@ -288,7 +332,14 @@ def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False): # will work for .py files frame = sys._getframe() # find first frame that is outside of this file - while frame.f_code.co_filename == __file__: + if PY2 and not __file__.endswith('.py'): + # in Python2 __file__ extension could be .pyc or .pyo (this doesn't account + # for edge case of Python compiled for non-standard extension) + current_file = __file__.rsplit('.', 1)[0] + '.py' + else: + current_file = __file__ + + while frame.f_code.co_filename == current_file: frame = frame.f_back frame_filename = frame.f_code.co_filename path = os.path.dirname(os.path.abspath(frame_filename)) @@ -304,17 +355,20 @@ def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False): return '' -def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False): +def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False, **kwargs): + # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, bool, Union[None, Text]) -> bool f = dotenv_path or stream or find_dotenv() - return DotEnv(f, verbose=verbose).set_as_environment_variables(override=override) + return DotEnv(f, verbose=verbose, **kwargs).set_as_environment_variables(override=override) -def dotenv_values(dotenv_path=None, stream=None, verbose=False): +def dotenv_values(dotenv_path=None, stream=None, verbose=False, **kwargs): + # type: (Union[Text, _PathLike, None], Optional[_StringIO], bool, Union[None, Text]) -> Dict[Text, Text] f = dotenv_path or stream or find_dotenv() 
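Usage note for the keyword pass-through in these hunks: ``load_dotenv()`` and ``dotenv_values()`` now forward extra keyword arguments (currently just ``encoding``) to ``DotEnv``, which hands the value on to ``io.open()``. A minimal sketch, assuming an existing ``.env`` file:

    from dotenv import load_dotenv, dotenv_values

    load_dotenv('.env', encoding='utf-8')             # forwarded to io.open()
    config = dotenv_values('.env', encoding='utf-8')  # dict of parsed key/value pairs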
- return DotEnv(f, verbose=verbose).dict() + return DotEnv(f, verbose=verbose, **kwargs).dict() def run_command(command, env): + # type: (List[str], Dict[str, str]) -> int """Run command in sub process. Runs the command in a sub process with the variables from `env` diff --git a/pipenv/vendor/dotenv/py.typed b/pipenv/vendor/dotenv/py.typed new file mode 100644 index 0000000000..7632ecf775 --- /dev/null +++ b/pipenv/vendor/dotenv/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/pipenv/vendor/dotenv/version.py b/pipenv/vendor/dotenv/version.py index 1f4c4d43b2..17c1a6260b 100644 --- a/pipenv/vendor/dotenv/version.py +++ b/pipenv/vendor/dotenv/version.py @@ -1 +1 @@ -__version__ = "0.10.1" +__version__ = "0.10.2" diff --git a/pipenv/vendor/parse.py b/pipenv/vendor/parse.py index b5d543f90d..0b5cce2395 100644 --- a/pipenv/vendor/parse.py +++ b/pipenv/vendor/parse.py @@ -345,6 +345,7 @@ **Version history (in brief)**: +- 1.12.0 Do not assume closing brace when an opening one is found (thanks @mattsep) - 1.11.1 Revert having unicode char in docstring, it breaks Bamboo builds(?!) - 1.11.0 Implement `__contains__` for Result instances. - 1.10.0 Introduce a "letters" matcher, since "w" matches numbers @@ -415,7 +416,7 @@ ''' from __future__ import absolute_import -__version__ = '1.11.1' +__version__ = '1.12.0' # yes, I now have two problems import re @@ -431,7 +432,7 @@ def with_pattern(pattern, regex_group_count=None): - """Attach a regular expression pattern matcher to a custom type converter + r"""Attach a regular expression pattern matcher to a custom type converter function. This annotates the type converter with the :attr:`pattern` attribute. @@ -885,7 +886,7 @@ def _generate_expression(self): e.append(r'\{') elif part == '}}': e.append(r'\}') - elif part[0] == '{': + elif part[0] == '{' and part[-1] == '}': # this will be a braces-delimited field to handle e.append(self._handle_field(part)) else: diff --git a/pipenv/vendor/pexpect/__init__.py b/pipenv/vendor/pexpect/__init__.py index 2a18d1911a..cf7a70d0a4 100644 --- a/pipenv/vendor/pexpect/__init__.py +++ b/pipenv/vendor/pexpect/__init__.py @@ -75,7 +75,7 @@ from .pty_spawn import spawn, spawnu from .run import run, runu -__version__ = '4.6.0' +__version__ = '4.7.0' __revision__ = '' __all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'spawnu', 'run', 'runu', 'which', 'split_command_line', '__version__', '__revision__'] diff --git a/pipenv/vendor/pexpect/_async.py b/pipenv/vendor/pexpect/_async.py index bdd515b1f5..ca2044e1cc 100644 --- a/pipenv/vendor/pexpect/_async.py +++ b/pipenv/vendor/pexpect/_async.py @@ -1,5 +1,6 @@ import asyncio import errno +import signal from pexpect import EOF @@ -29,6 +30,23 @@ def expect_async(expecter, timeout=None): transport.pause_reading() return expecter.timeout(e) +@asyncio.coroutine +def repl_run_command_async(repl, cmdlines, timeout=-1): + res = [] + repl.child.sendline(cmdlines[0]) + for line in cmdlines[1:]: + yield from repl._expect_prompt(timeout=timeout, async_=True) + res.append(repl.child.before) + repl.child.sendline(line) + + # Command was fully submitted, now wait for the next prompt + prompt_idx = yield from repl._expect_prompt(timeout=timeout, async_=True) + if prompt_idx == 1: + # We got the continuation prompt - command was incomplete + repl.child.kill(signal.SIGINT) + yield from repl._expect_prompt(timeout=1, async_=True) + raise ValueError("Continuation prompt found - input was incomplete:") + return u''.join(res + [repl.child.before]) class 
PatternWaiter(asyncio.Protocol): transport = None @@ -41,7 +59,7 @@ def found(self, result): if not self.fut.done(): self.fut.set_result(result) self.transport.pause_reading() - + def error(self, exc): if not self.fut.done(): self.fut.set_exception(exc) @@ -49,7 +67,7 @@ def error(self, exc): def connection_made(self, transport): self.transport = transport - + def data_received(self, data): spawn = self.expecter.spawn s = spawn._decoder.decode(data) @@ -67,7 +85,7 @@ def data_received(self, data): except Exception as e: self.expecter.errored() self.error(e) - + def eof_received(self): # N.B. If this gets called, async will close the pipe (the spawn object) # for us @@ -78,7 +96,7 @@ def eof_received(self): self.error(e) else: self.found(index) - + def connection_lost(self, exc): if isinstance(exc, OSError) and exc.errno == errno.EIO: # We may get here without eof_received being called, e.g on Linux diff --git a/pipenv/vendor/pexpect/expect.py b/pipenv/vendor/pexpect/expect.py index 1c0275b485..db376d593c 100644 --- a/pipenv/vendor/pexpect/expect.py +++ b/pipenv/vendor/pexpect/expect.py @@ -244,7 +244,7 @@ def __init__(self, patterns): self.eof_index = -1 self.timeout_index = -1 self._searches = [] - for n, s in zip(list(range(len(patterns))), patterns): + for n, s in enumerate(patterns): if s is EOF: self.eof_index = n continue diff --git a/pipenv/vendor/pexpect/pty_spawn.py b/pipenv/vendor/pexpect/pty_spawn.py index e0e2b54fd0..691c2c63f0 100644 --- a/pipenv/vendor/pexpect/pty_spawn.py +++ b/pipenv/vendor/pexpect/pty_spawn.py @@ -430,61 +430,83 @@ def read_nonblocking(self, size=1, timeout=-1): available right away then one character will be returned immediately. It will not wait for 30 seconds for another 99 characters to come in. - This is a wrapper around os.read(). It uses select.select() to - implement the timeout. ''' + On the other hand, if there are bytes available to read immediately, + all those bytes will be read (up to the buffer size). So, if the + buffer size is 1 megabyte and there is 1 megabyte of data available + to read, the buffer will be filled, regardless of timeout. + + This is a wrapper around os.read(). It uses select.select() or + select.poll() to implement the timeout. ''' if self.closed: raise ValueError('I/O operation on closed file.') + if self.use_poll: + def select(timeout): + return poll_ignore_interrupts([self.child_fd], timeout) + else: + def select(timeout): + return select_ignore_interrupts([self.child_fd], [], [], timeout)[0] + + # If there is data available to read right now, read as much as + # we can. We do this to increase performance if there are a lot + # of bytes to be read. This also avoids calling isalive() too + # often. See also: + # * https://github.com/pexpect/pexpect/pull/304 + # * http://trac.sagemath.org/ticket/10295 + if select(0): + try: + incoming = super(spawn, self).read_nonblocking(size) + except EOF: + # Maybe the child is dead: update some attributes in that case + self.isalive() + raise + while len(incoming) < size and select(0): + try: + incoming += super(spawn, self).read_nonblocking(size - len(incoming)) + except EOF: + # Maybe the child is dead: update some attributes in that case + self.isalive() + # Don't raise EOF, just return what we read so far. + return incoming + return incoming + if timeout == -1: timeout = self.timeout - # Note that some systems such as Solaris do not give an EOF when - # the child dies. In fact, you can still try to read - # from the child_fd -- it will block forever or until TIMEOUT. 
- # For this case, I test isalive() before doing any reading. - # If isalive() is false, then I pretend that this is the same as EOF. if not self.isalive(): - # timeout of 0 means "poll" - if self.use_poll: - r = poll_ignore_interrupts([self.child_fd], timeout) - else: - r, w, e = select_ignore_interrupts([self.child_fd], [], [], 0) - if not r: - self.flag_eof = True - raise EOF('End Of File (EOF). Braindead platform.') + # The process is dead, but there may or may not be data + # available to read. Note that some systems such as Solaris + # do not give an EOF when the child dies. In fact, you can + # still try to read from the child_fd -- it will block + # forever or until TIMEOUT. For that reason, it's important + # to do this check before calling select() with timeout. + if select(0): + return super(spawn, self).read_nonblocking(size) + self.flag_eof = True + raise EOF('End Of File (EOF). Braindead platform.') elif self.__irix_hack: # Irix takes a long time before it realizes a child was terminated. + # Make sure that the timeout is at least 2 seconds. # FIXME So does this mean Irix systems are forced to always have # FIXME a 2 second delay when calling read_nonblocking? That sucks. - if self.use_poll: - r = poll_ignore_interrupts([self.child_fd], timeout) - else: - r, w, e = select_ignore_interrupts([self.child_fd], [], [], 2) - if not r and not self.isalive(): - self.flag_eof = True - raise EOF('End Of File (EOF). Slow platform.') - if self.use_poll: - r = poll_ignore_interrupts([self.child_fd], timeout) - else: - r, w, e = select_ignore_interrupts( - [self.child_fd], [], [], timeout - ) - - if not r: - if not self.isalive(): - # Some platforms, such as Irix, will claim that their - # processes are alive; timeout on the select; and - # then finally admit that they are not alive. - self.flag_eof = True - raise EOF('End of File (EOF). Very slow platform.') - else: - raise TIMEOUT('Timeout exceeded.') + if timeout is not None and timeout < 2: + timeout = 2 - if self.child_fd in r: + # Because of the select(0) check above, we know that no data + # is available right now. But if a non-zero timeout is given + # (possibly timeout=None), we call select() with a timeout. + if (timeout != 0) and select(timeout): return super(spawn, self).read_nonblocking(size) - raise ExceptionPexpect('Reached an unexpected state.') # pragma: no cover + if not self.isalive(): + # Some platforms, such as Irix, will claim that their + # processes are alive; timeout on the select; and + # then finally admit that they are not alive. + self.flag_eof = True + raise EOF('End of File (EOF). Very slow platform.') + else: + raise TIMEOUT('Timeout exceeded.') def write(self, s): '''This is similar to send() except that there is no return value. diff --git a/pipenv/vendor/pexpect/pxssh.py b/pipenv/vendor/pexpect/pxssh.py index ef2e91186b..3d53bd9746 100644 --- a/pipenv/vendor/pexpect/pxssh.py +++ b/pipenv/vendor/pexpect/pxssh.py @@ -109,7 +109,7 @@ class pxssh (spawn): username = raw_input('username: ') password = getpass.getpass('password: ') s.login (hostname, username, password) - + `debug_command_string` is only for the test suite to confirm that the string generated for SSH is correct, using this will not allow you to do anything other than get a string back from `pxssh.pxssh.login()`. 
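The pxssh hunk below threads a new ``use_poll`` flag through ``__init__`` to the underlying ``spawn``, matching the ``select.select()``/``select.poll()`` switch used by ``read_nonblocking()`` above. A rough sketch with placeholder host and credentials:

    from pexpect import pxssh

    s = pxssh.pxssh(use_poll=True)  # poll() avoids select()'s FD_SETSIZE limit
    s.login('example.com', 'user', 'secret')
    s.sendline('uptime')
    s.prompt()                      # wait for the unique prompt set at login
    print(s.before)                 # output captured before the prompt
    s.logout()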
@@ -118,12 +118,12 @@ class pxssh (spawn): def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None, logfile=None, cwd=None, env=None, ignore_sighup=True, echo=True, options={}, encoding=None, codec_errors='strict', - debug_command_string=False): + debug_command_string=False, use_poll=False): spawn.__init__(self, None, timeout=timeout, maxread=maxread, searchwindowsize=searchwindowsize, logfile=logfile, cwd=cwd, env=env, ignore_sighup=ignore_sighup, echo=echo, - encoding=encoding, codec_errors=codec_errors) + encoding=encoding, codec_errors=codec_errors, use_poll=use_poll) self.name = '' @@ -154,7 +154,7 @@ def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None, # Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying! #self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'" self.force_password = False - + self.debug_command_string = debug_command_string # User defined SSH options, eg, @@ -220,7 +220,7 @@ def sync_original_prompt (self, sync_multiplier=1.0): can take 12 seconds. Low latency connections are more likely to fail with a low sync_multiplier. Best case sync time gets worse with a high sync multiplier (500 ms with default). ''' - + # All of these timing pace values are magic. # I came up with these based on what seemed reliable for # connecting to a heavily loaded machine I have. @@ -253,20 +253,19 @@ def sync_original_prompt (self, sync_multiplier=1.0): ### TODO: This is getting messy and I'm pretty sure this isn't perfect. ### TODO: I need to draw a flow chart for this. ### TODO: Unit tests for SSH tunnels, remote SSH command exec, disabling original prompt sync - def login (self, server, username, password='', terminal_type='ansi', + def login (self, server, username=None, password='', terminal_type='ansi', original_prompt=r"[#$]", login_timeout=10, port=None, auto_prompt_reset=True, ssh_key=None, quiet=True, sync_multiplier=1, check_local_ip=True, password_regex=r'(?i)(?:password:)|(?:passphrase for key)', ssh_tunnels={}, spawn_local_ssh=True, - sync_original_prompt=True, ssh_config=None): + sync_original_prompt=True, ssh_config=None, cmd='ssh'): '''This logs the user into the given server. - It uses - 'original_prompt' to try to find the prompt right after login. When it - finds the prompt it immediately tries to reset the prompt to something - more easily matched. The default 'original_prompt' is very optimistic - and is easily fooled. It's more reliable to try to match the original + It uses 'original_prompt' to try to find the prompt right after login. + When it finds the prompt it immediately tries to reset the prompt to + something more easily matched. The default 'original_prompt' is very + optimistic and is easily fooled. It's more reliable to try to match the original prompt as exactly as possible to prevent false matches by server strings such as the "Message Of The Day". On many systems you can disable the MOTD on the remote server by creating a zero-length file @@ -284,27 +283,31 @@ def login (self, server, username, password='', terminal_type='ansi', uses a unique prompt in the :meth:`prompt` method. If the original prompt is not reset then this will disable the :meth:`prompt` method unless you manually set the :attr:`PROMPT` attribute. - + Set ``password_regex`` if there is a MOTD message with `password` in it. Changing this is like playing in traffic, don't (p)expect it to match straight away. 
- + If you require to connect to another SSH server from the your original SSH connection set ``spawn_local_ssh`` to `False` and this will use your current session to do so. Setting this option to `False` and not having an active session will trigger an error. - + Set ``ssh_key`` to a file path to an SSH private key to use that SSH key for the session authentication. Set ``ssh_key`` to `True` to force passing the current SSH authentication socket to the desired ``hostname``. - + Set ``ssh_config`` to a file path string of an SSH client config file to pass that file to the client to handle itself. You may set any options you wish in here, however doing so will require you to post extra information that you may not want to if you run into issues. + + Alter the ``cmd`` to change the ssh client used, or to prepend it with network + namespaces. For example ```cmd="ip netns exec vlan2 ssh"``` to execute the ssh in + network namespace named ```vlan```. ''' - + session_regex_array = ["(?i)are you sure you want to continue connecting", original_prompt, password_regex, "(?i)permission denied", "(?i)terminal type", TIMEOUT] session_init_regex_array = [] session_init_regex_array.extend(session_regex_array) @@ -320,7 +323,7 @@ def login (self, server, username, password='', terminal_type='ansi', if ssh_config is not None: if spawn_local_ssh and not os.path.isfile(ssh_config): raise ExceptionPxssh('SSH config does not exist or is not a file.') - ssh_options = ssh_options + '-F ' + ssh_config + ssh_options = ssh_options + ' -F ' + ssh_config if port is not None: ssh_options = ssh_options + ' -p %s'%(str(port)) if ssh_key is not None: @@ -331,7 +334,7 @@ def login (self, server, username, password='', terminal_type='ansi', if spawn_local_ssh and not os.path.isfile(ssh_key): raise ExceptionPxssh('private ssh key does not exist or is not a file.') ssh_options = ssh_options + ' -i %s' % (ssh_key) - + # SSH tunnels, make sure you know what you're putting into the lists # under each heading. Do not expect these to open 100% of the time, # The port you're requesting might be bound. 
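The hunk below makes `username` optional whenever the `ssh_config` file can supply one, and the new `cmd` parameter swaps out the ssh client string entirely. A usage sketch; the host and config path are placeholders:

```python
from pexpect import pxssh

s = pxssh.pxssh()
# Username comes from the matching Host/User entry in the config file.
s.login('myhost', ssh_config='/home/me/.ssh/config')  # placeholders
# Or run ssh inside a network namespace via the new ``cmd`` parameter:
# s.login('myhost', 'user', cmd='ip netns exec vlan2 ssh')
s.logout()
```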
@@ -354,7 +357,42 @@ def login (self, server, username, password='', terminal_type='ansi', if spawn_local_ssh==False: tunnel = quote(str(tunnel)) ssh_options = ssh_options + ' -' + cmd_type + ' ' + str(tunnel) - cmd = "ssh %s -l %s %s" % (ssh_options, username, server) + + if username is not None: + ssh_options = ssh_options + ' -l ' + username + elif ssh_config is None: + raise TypeError('login() needs either a username or an ssh_config') + else: # make sure ssh_config has an entry for the server with a username + with open(ssh_config, 'rt') as f: + lines = [l.strip() for l in f.readlines()] + + server_regex = r'^Host\s+%s\s*$' % server + user_regex = r'^User\s+\w+\s*$' + config_has_server = False + server_has_username = False + for line in lines: + if not config_has_server and re.match(server_regex, line, re.IGNORECASE): + config_has_server = True + elif config_has_server and 'hostname' in line.lower(): + pass + elif config_has_server and 'host' in line.lower(): + server_has_username = False # insurance + break # we have left the relevant section + elif config_has_server and re.match(user_regex, line, re.IGNORECASE): + server_has_username = True + break + + if lines: + del line + + del lines + + if not config_has_server: + raise TypeError('login() ssh_config has no Host entry for %s' % server) + elif not server_has_username: + raise TypeError('login() ssh_config has no user entry for %s' % server) + + cmd += " %s %s" % (ssh_options, server) if self.debug_command_string: return(cmd) diff --git a/pipenv/vendor/pexpect/replwrap.py b/pipenv/vendor/pexpect/replwrap.py index ed0e657d73..c930f1e4fe 100644 --- a/pipenv/vendor/pexpect/replwrap.py +++ b/pipenv/vendor/pexpect/replwrap.py @@ -61,11 +61,11 @@ def set_prompt(self, orig_prompt, prompt_change): self.child.expect(orig_prompt) self.child.sendline(prompt_change) - def _expect_prompt(self, timeout=-1): + def _expect_prompt(self, timeout=-1, async_=False): return self.child.expect_exact([self.prompt, self.continuation_prompt], - timeout=timeout) + timeout=timeout, async_=async_) - def run_command(self, command, timeout=-1): + def run_command(self, command, timeout=-1, async_=False): """Send a command to the REPL, wait for and return output. :param str command: The command to send. Trailing newlines are not needed. @@ -75,6 +75,10 @@ def run_command(self, command, timeout=-1): :param int timeout: How long to wait for the next prompt. -1 means the default from the :class:`pexpect.spawn` object (default 30 seconds). None means to wait indefinitely. + :param bool async_: On Python 3.4, or Python 3.3 with asyncio + installed, passing ``async_=True`` will make this return an + :mod:`asyncio` Future, which you can yield from to get the same + result that this method would normally give directly. 
""" # Split up multiline commands and feed them in bit-by-bit cmdlines = command.splitlines() @@ -84,6 +88,10 @@ def run_command(self, command, timeout=-1): if not cmdlines: raise ValueError("No command was given") + if async_: + from ._async import repl_run_command_async + return repl_run_command_async(self, cmdlines, timeout) + res = [] self.child.sendline(cmdlines[0]) for line in cmdlines[1:]: diff --git a/pipenv/vendor/pythonfinder/_vendor/pep514tools/LICENSE b/pipenv/vendor/pythonfinder/_vendor/pep514tools/LICENSE new file mode 100644 index 0000000000..c7ac395fb9 --- /dev/null +++ b/pipenv/vendor/pythonfinder/_vendor/pep514tools/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Steve Dower + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pipenv/vendor/pythonfinder/_vendor/pep514tools/_registry.py b/pipenv/vendor/pythonfinder/_vendor/pep514tools/_registry.py index 2611f60127..da72ecb541 100644 --- a/pipenv/vendor/pythonfinder/_vendor/pep514tools/_registry.py +++ b/pipenv/vendor/pythonfinder/_vendor/pep514tools/_registry.py @@ -172,7 +172,7 @@ def set_all_values(self, info): items = info.items() else: raise TypeError('info must be a dictionary') - + self._set_all_values(self._root, self.subkey, items, errors) if len(errors) == 1: raise ValueError(errors[0]) diff --git a/pipenv/vendor/pythonfinder/_vendor/vendor.txt b/pipenv/vendor/pythonfinder/_vendor/vendor.txt index 8875249845..e635a2a8c1 100644 --- a/pipenv/vendor/pythonfinder/_vendor/vendor.txt +++ b/pipenv/vendor/pythonfinder/_vendor/vendor.txt @@ -1 +1 @@ --e git+https://github.com/zooba/pep514tools.git@320e48745660b696e2dcaee888fc2e516b435e48#egg=pep514tools +git+https://github.com/zooba/pep514tools.git@master#egg=pep514tools diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index 77ab414eac..a728058a9b 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -10,7 +10,7 @@ from .models.pipfile import Pipfile from .models.requirements import Requirement -__version__ = "1.4.3" +__version__ = "1.5.0" logger = logging.getLogger(__name__) diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py index 503a13d071..3769dbac1f 100644 --- a/pipenv/vendor/requirementslib/utils.py +++ b/pipenv/vendor/requirementslib/utils.py @@ -121,7 +121,7 @@ def strip_ssh_from_git_uri(uri): def add_ssh_scheme_to_git_uri(uri): # type: (S) -> S - """Cleans VCS uris from pip format""" + """Cleans VCS uris from 
pipenv.patched.notpip format""" if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: diff --git a/pipenv/vendor/scandir.py b/pipenv/vendor/scandir.py index 8bbae2c5cc..44f949fd39 100644 --- a/pipenv/vendor/scandir.py +++ b/pipenv/vendor/scandir.py @@ -37,7 +37,7 @@ warnings.warn("scandir can't find the compiled _scandir C module " "or ctypes, using slow generic fallback") -__version__ = '1.9.0' +__version__ = '1.10.0' __all__ = ['scandir', 'walk'] # Windows FILE_ATTRIBUTE constants for interpreting the @@ -583,7 +583,7 @@ def scandir_python(path=unicode('.')): if _scandir is not None: scandir = scandir_c DirEntry = DirEntry_c - elif ctypes is not None: + elif ctypes is not None and have_dirent_d_type: scandir = scandir_python DirEntry = PosixDirEntry else: diff --git a/pipenv/vendor/shellingham/__init__.py b/pipenv/vendor/shellingham/__init__.py index f879cf9d32..b834b74b3f 100644 --- a/pipenv/vendor/shellingham/__init__.py +++ b/pipenv/vendor/shellingham/__init__.py @@ -4,7 +4,7 @@ from ._core import ShellDetectionFailure -__version__ = '1.2.8' +__version__ = '1.3.1' def detect_shell(pid=None, max_depth=6): diff --git a/pipenv/vendor/shellingham/_core.py b/pipenv/vendor/shellingham/_core.py index fb988eb398..da103a9471 100644 --- a/pipenv/vendor/shellingham/_core.py +++ b/pipenv/vendor/shellingham/_core.py @@ -1,5 +1,5 @@ SHELL_NAMES = { - 'sh', 'bash', 'dash', # Bourne. + 'sh', 'bash', 'dash', 'ash', # Bourne. 'csh', 'tcsh', # C. 'ksh', 'zsh', 'fish', # Common alternatives. 'cmd', 'powershell', 'pwsh', # Microsoft. diff --git a/pipenv/vendor/shellingham/posix.py b/pipenv/vendor/shellingham/posix.py index b25dd87456..0bbf988b90 100644 --- a/pipenv/vendor/shellingham/posix.py +++ b/pipenv/vendor/shellingham/posix.py @@ -21,7 +21,7 @@ def _get_process_mapping(): processes = {} for line in output.split('\n'): try: - pid, ppid, args = line.strip().split(maxsplit=2) + pid, ppid, args = line.strip().split(None, 2) except ValueError: continue processes[pid] = Process( diff --git a/pipenv/vendor/shellingham/posix/_default.py b/pipenv/vendor/shellingham/posix/_default.py deleted file mode 100644 index 8694427611..0000000000 --- a/pipenv/vendor/shellingham/posix/_default.py +++ /dev/null @@ -1,27 +0,0 @@ -import collections -import shlex -import subprocess -import sys - - -Process = collections.namedtuple('Process', 'args pid ppid') - - -def get_process_mapping(): - """Try to look up the process tree via the output of `ps`. 
- """ - output = subprocess.check_output([ - 'ps', '-ww', '-o', 'pid=', '-o', 'ppid=', '-o', 'args=', - ]) - if not isinstance(output, str): - output = output.decode(sys.stdout.encoding) - processes = {} - for line in output.split('\n'): - try: - pid, ppid, args = line.strip().split(None, 2) - except ValueError: - continue - processes[pid] = Process( - args=tuple(shlex.split(args)), pid=pid, ppid=ppid, - ) - return processes diff --git a/pipenv/vendor/shellingham/posix/_proc.py b/pipenv/vendor/shellingham/posix/_proc.py index 921f250819..e3a6e46db0 100644 --- a/pipenv/vendor/shellingham/posix/_proc.py +++ b/pipenv/vendor/shellingham/posix/_proc.py @@ -1,40 +1,34 @@ import os import re -from ._core import Process +from ._default import Process STAT_PPID = 3 STAT_TTY = 6 -STAT_PATTERN = re.compile(r'\(.+\)|\S+') - - -def _get_stat(pid): - with open(os.path.join('/proc', str(pid), 'stat')) as f: - parts = STAT_PATTERN.findall(f.read()) - return parts[STAT_TTY], parts[STAT_PPID] - - -def _get_cmdline(pid): - with open(os.path.join('/proc', str(pid), 'cmdline')) as f: - return tuple(f.read().split('\0')[:-1]) - def get_process_mapping(): """Try to look up the process tree via the /proc interface. """ - self_tty = _get_stat(os.getpid())[0] + with open('/proc/{0}/stat'.format(os.getpid())) as f: + self_tty = f.read().split()[STAT_TTY] processes = {} for pid in os.listdir('/proc'): if not pid.isdigit(): continue try: - tty, ppid = _get_stat(pid) - if tty != self_tty: - continue - args = _get_cmdline(pid) - processes[pid] = Process(args=args, pid=pid, ppid=ppid) + stat = '/proc/{0}/stat'.format(pid) + cmdline = '/proc/{0}/cmdline'.format(pid) + with open(stat) as fstat, open(cmdline) as fcmdline: + stat = re.findall(r'\(.+\)|\S+', fstat.read()) + cmd = fcmdline.read().split('\x00')[:-1] + ppid = stat[STAT_PPID] + tty = stat[STAT_TTY] + if tty == self_tty: + processes[pid] = Process( + args=tuple(cmd), pid=pid, ppid=ppid, + ) except IOError: # Process has disappeared - just ignore it. continue diff --git a/pipenv/vendor/shellingham/posix/_ps.py b/pipenv/vendor/shellingham/posix/_ps.py index e96278cf5f..8694427611 100644 --- a/pipenv/vendor/shellingham/posix/_ps.py +++ b/pipenv/vendor/shellingham/posix/_ps.py @@ -1,8 +1,10 @@ +import collections import shlex import subprocess import sys -from ._core import Process + +Process = collections.namedtuple('Process', 'args pid ppid') def get_process_mapping(): diff --git a/pipenv/vendor/shellingham/posix/linux.py b/pipenv/vendor/shellingham/posix/linux.py deleted file mode 100644 index 6db9783481..0000000000 --- a/pipenv/vendor/shellingham/posix/linux.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import re - -from ._default import Process - - -STAT_PPID = 3 -STAT_TTY = 6 - - -def get_process_mapping(): - """Try to look up the process tree via Linux's /proc - """ - with open('/proc/{0}/stat'.format(os.getpid())) as f: - self_tty = f.read().split()[STAT_TTY] - processes = {} - for pid in os.listdir('/proc'): - if not pid.isdigit(): - continue - try: - stat = '/proc/{0}/stat'.format(pid) - cmdline = '/proc/{0}/cmdline'.format(pid) - with open(stat) as fstat, open(cmdline) as fcmdline: - stat = re.findall(r'\(.+\)|\S+', fstat.read()) - cmd = fcmdline.read().split('\x00')[:-1] - ppid = stat[STAT_PPID] - tty = stat[STAT_TTY] - if tty == self_tty: - processes[pid] = Process( - args=tuple(cmd), pid=pid, ppid=ppid, - ) - except IOError: - # Process has disappeared - just ignore it. 
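Both the deleted `linux.py` and the new `_proc.py` body above parse `/proc/<pid>/stat` with `re.findall(r'\(.+\)|\S+', ...)` because the parenthesised command name may itself contain spaces. A minimal standalone sketch of that parsing (Linux-only):

```python
import os
import re

STAT_PPID = 3
STAT_TTY = 6


def stat_fields(pid):
    # '(comm)' may contain spaces, so plain str.split() would misalign
    # every later field; the regex keeps it as one token.
    with open('/proc/{0}/stat'.format(pid)) as f:
        return re.findall(r'\(.+\)|\S+', f.read())


fields = stat_fields(os.getpid())
print(fields[1], fields[STAT_PPID], fields[STAT_TTY])  # (comm) ppid tty_nr
```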
- continue - return processes diff --git a/pipenv/vendor/urllib3/LICENSE.txt b/pipenv/vendor/urllib3/LICENSE.txt index 1c3283ee5b..c89cf27b85 100644 --- a/pipenv/vendor/urllib3/LICENSE.txt +++ b/pipenv/vendor/urllib3/LICENSE.txt @@ -1,19 +1,21 @@ -This is the MIT license: http://www.opensource.org/licenses/mit-license.php +MIT License -Copyright 2008-2016 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -Permission is hereby granted, free of charge, to any person obtaining a copy of this -software and associated documentation files (the "Software"), to deal in the Software -without restriction, including without limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons -to whom the Software is furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all copies or -substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR -PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE -FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pipenv/vendor/urllib3/__init__.py b/pipenv/vendor/urllib3/__init__.py index 148a9c31a7..eb9158867a 100644 --- a/pipenv/vendor/urllib3/__init__.py +++ b/pipenv/vendor/urllib3/__init__.py @@ -1,7 +1,6 @@ """ urllib3 - Thread-safe connection pooling and re-using. """ - from __future__ import absolute_import import warnings @@ -27,7 +26,7 @@ __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.24.1' +__version__ = '1.25.2' __all__ = ( 'HTTPConnectionPool', diff --git a/pipenv/vendor/urllib3/connection.py b/pipenv/vendor/urllib3/connection.py index 02b36654bd..f816ee807d 100644 --- a/pipenv/vendor/urllib3/connection.py +++ b/pipenv/vendor/urllib3/connection.py @@ -19,10 +19,11 @@ class BaseSSLError(BaseException): pass -try: # Python 3: - # Not a no-op, we're adding this to the namespace so it can be imported. +try: + # Python 3: not a no-op, we're adding this to the namespace so it can be imported. 
ConnectionError = ConnectionError -except NameError: # Python 2: +except NameError: + # Python 2 class ConnectionError(Exception): pass @@ -101,7 +102,7 @@ class HTTPConnection(_HTTPConnection, object): is_verified = False def __init__(self, *args, **kw): - if six.PY3: # Python 3 + if six.PY3: kw.pop('strict', None) # Pre-set source_address. @@ -158,7 +159,7 @@ def _new_conn(self): conn = connection.create_connection( (self._dns_host, self.port), self.timeout, **extra_kw) - except SocketTimeout as e: + except SocketTimeout: raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) @@ -171,7 +172,8 @@ def _new_conn(self): def _prepare_conn(self, conn): self.sock = conn - if self._tunnel_host: + # Google App Engine's httplib does not define _tunnel_host + if getattr(self, '_tunnel_host', None): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() # Mark this connection as not reusable @@ -226,7 +228,8 @@ class HTTPSConnection(HTTPConnection): ssl_version = None def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + key_password=None, strict=None, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, ssl_context=None, server_hostname=None, **kw): HTTPConnection.__init__(self, host, port, strict=strict, @@ -234,6 +237,7 @@ def __init__(self, host, port=None, key_file=None, cert_file=None, self.key_file = key_file self.cert_file = cert_file + self.key_password = key_password self.ssl_context = ssl_context self.server_hostname = server_hostname @@ -255,6 +259,7 @@ def connect(self): sock=conn, keyfile=self.key_file, certfile=self.cert_file, + key_password=self.key_password, ssl_context=self.ssl_context, server_hostname=self.server_hostname ) @@ -272,25 +277,24 @@ class VerifiedHTTPSConnection(HTTPSConnection): assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None, + cert_reqs=None, key_password=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None): """ This method should only be called once, before the connection is used. """ - # If cert_reqs is not provided, we can try to guess. If the user gave - # us a cert database, we assume they want to use it: otherwise, if - # they gave us an SSL Context object we should use whatever is set for - # it. + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. if cert_reqs is None: - if ca_certs or ca_cert_dir: - cert_reqs = 'CERT_REQUIRED' - elif self.ssl_context is not None: + if self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs + self.key_password = key_password self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ca_certs = ca_certs and os.path.expanduser(ca_certs) @@ -301,7 +305,8 @@ def connect(self): conn = self._new_conn() hostname = self.host - if self._tunnel_host: + # Google App Engine's httplib does not define _tunnel_host + if getattr(self, '_tunnel_host', None): self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. 
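The `key_password` parameter is threaded from the pool down to `ssl_wrap_socket` so an encrypted client key can be used without an interactive prompt. A usage sketch; every path and the password are placeholders:

```python
import urllib3

# All paths and the password below are placeholders.
pool = urllib3.HTTPSConnectionPool(
    'example.com', 443,
    cert_file='client.crt',        # client certificate
    key_file='client.key',         # encrypted private key
    key_password='correct horse',  # used to decrypt key_file
    ca_certs='ca-bundle.pem',
    cert_reqs='CERT_REQUIRED',
)
response = pool.request('GET', '/')
print(response.status)
```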
@@ -338,6 +343,7 @@ def connect(self): sock=conn, keyfile=self.key_file, certfile=self.cert_file, + key_password=self.key_password, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, server_hostname=server_hostname, diff --git a/pipenv/vendor/urllib3/connectionpool.py b/pipenv/vendor/urllib3/connectionpool.py index f7a8f193d1..157568a395 100644 --- a/pipenv/vendor/urllib3/connectionpool.py +++ b/pipenv/vendor/urllib3/connectionpool.py @@ -26,6 +26,7 @@ from .packages.ssl_match_hostname import CertificateError from .packages import six from .packages.six.moves import queue +from .packages.rfc3986.normalizers import normalize_host from .connection import ( port_by_scheme, DummyConnection, @@ -65,7 +66,7 @@ def __init__(self, host, port=None): if not host: raise LocationValueError("No host specified.") - self.host = _ipv6_host(host, self.scheme) + self.host = _normalize_host(host, scheme=self.scheme) self._proxy_host = host.lower() self.port = port @@ -373,9 +374,11 @@ def _make_request(self, conn, method, url, timeout=_Default, chunked=False, # Receive the response from the server try: - try: # Python 2.7, use buffering of HTTP responses + try: + # Python 2.7, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 3 + except TypeError: + # Python 3 try: httplib_response = conn.getresponse() except Exception as e: @@ -432,8 +435,8 @@ def is_same_host(self, url): # TODO: Add optional support for socket.gethostbyname checking. scheme, host, port = get_host(url) - - host = _ipv6_host(host, self.scheme) + if host is not None: + host = _normalize_host(host, scheme=scheme) # Use explicit default port for comparison when none is given if self.port and not port: @@ -672,7 +675,7 @@ def drain_and_release_conn(response): # released back to the pool once the entire response is read response.read() except (TimeoutError, HTTPException, SocketError, ProtocolError, - BaseSSLError, SSLError) as e: + BaseSSLError, SSLError): pass # Handle redirect? @@ -746,8 +749,8 @@ class HTTPSConnectionPool(HTTPConnectionPool): If ``assert_hostname`` is False, no verification is done. The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, - ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is - available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket into an SSL socket. 
""" @@ -759,7 +762,7 @@ def __init__(self, host, port=None, block=False, headers=None, retries=None, _proxy=None, _proxy_headers=None, key_file=None, cert_file=None, cert_reqs=None, - ca_certs=None, ssl_version=None, + key_password=None, ca_certs=None, ssl_version=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None, **conn_kw): @@ -767,12 +770,10 @@ def __init__(self, host, port=None, block, headers, retries, _proxy, _proxy_headers, **conn_kw) - if ca_certs and cert_reqs is None: - cert_reqs = 'CERT_REQUIRED' - self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs + self.key_password = key_password self.ca_certs = ca_certs self.ca_cert_dir = ca_cert_dir self.ssl_version = ssl_version @@ -787,6 +788,7 @@ def _prepare_conn(self, conn): if isinstance(conn, VerifiedHTTPSConnection): conn.set_cert(key_file=self.key_file, + key_password=self.key_password, cert_file=self.cert_file, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs, @@ -824,7 +826,9 @@ def _new_conn(self): conn = self.ConnectionCls(host=actual_host, port=actual_port, timeout=self.timeout.connect_timeout, - strict=self.strict, **self.conn_kw) + strict=self.strict, cert_file=self.cert_file, + key_file=self.key_file, key_password=self.key_password, + **self.conn_kw) return self._prepare_conn(conn) @@ -875,9 +879,9 @@ def connection_from_url(url, **kw): return HTTPConnectionPool(host, port=port, **kw) -def _ipv6_host(host, scheme): +def _normalize_host(host, scheme): """ - Process IPv6 address literals + Normalize hosts for comparisons and use with sockets. """ # httplib doesn't like it when we include brackets in IPv6 addresses @@ -886,11 +890,8 @@ def _ipv6_host(host, scheme): # Instead, we need to make sure we never pass ``None`` as the port. # However, for backward compatibility reasons we can't actually # *assert* that. See http://bugs.python.org/issue28539 - # - # Also if an IPv6 address literal has a zone identifier, the - # percent sign might be URIencoded, convert it back into ASCII if host.startswith('[') and host.endswith(']'): - host = host.replace('%25', '%').strip('[]') + host = host.strip('[]') if scheme in NORMALIZABLE_SCHEMES: - host = host.lower() + host = normalize_host(host) return host diff --git a/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py b/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py index bcf41c02b2..be34215359 100644 --- a/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py +++ b/pipenv/vendor/urllib3/contrib/_securetransport/bindings.py @@ -516,6 +516,8 @@ class SecurityConst(object): kTLSProtocol1 = 4 kTLSProtocol11 = 7 kTLSProtocol12 = 8 + kTLSProtocol13 = 10 + kTLSProtocolMaxSupported = 999 kSSLClientSide = 1 kSSLStreamType = 0 @@ -558,30 +560,27 @@ class SecurityConst(object): errSecInvalidTrustSettings = -25262 # Cipher suites. We only pick the ones our default cipher string allows. 
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F - TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 + TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 + TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F - TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B - TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 - TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 - TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D @@ -590,4 +589,5 @@ class SecurityConst(object): TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F TLS_AES_128_GCM_SHA256 = 0x1301 TLS_AES_256_GCM_SHA384 = 0x1302 - TLS_CHACHA20_POLY1305_SHA256 = 0x1303 + TLS_AES_128_CCM_8_SHA256 = 0x1305 + TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/pipenv/vendor/urllib3/contrib/pyopenssl.py b/pipenv/vendor/urllib3/contrib/pyopenssl.py index 7c0e9465d9..821c174fdc 100644 --- a/pipenv/vendor/urllib3/contrib/pyopenssl.py +++ b/pipenv/vendor/urllib3/contrib/pyopenssl.py @@ -70,6 +70,7 @@ class UnsupportedExtension(Exception): from .. import util + __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] # SNI always works. @@ -77,20 +78,19 @@ class UnsupportedExtension(Exception): # Map from urllib3 to PyOpenSSL compatible parameter-values. _openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } +if hasattr(ssl, 'PROTOCOL_SSLv3') and hasattr(OpenSSL.SSL, 'SSLv3_METHOD'): + _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD + if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD -try: - _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) -except AttributeError: - pass _stdlib_to_openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, @@ -117,6 +117,7 @@ def inject_into_urllib3(): _validate_dependencies_met() + util.SSLContext = PyOpenSSLContext util.ssl_.SSLContext = PyOpenSSLContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI @@ -127,6 +128,7 @@ def inject_into_urllib3(): def extract_from_urllib3(): 'Undo monkey-patching by :func:`inject_into_urllib3`.' 
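For orientation, the `inject_into_urllib3`/`extract_from_urllib3` pair being extended here is used like this (requires pyOpenSSL installed):

```python
import urllib3
import urllib3.contrib.pyopenssl

# Monkey-patch urllib3's SSL machinery to use PyOpenSSL, then undo it.
urllib3.contrib.pyopenssl.inject_into_urllib3()
try:
    http = urllib3.PoolManager()
    print(http.request('GET', 'https://example.com/').status)
finally:
    urllib3.contrib.pyopenssl.extract_from_urllib3()
```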
+ util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI @@ -184,6 +186,10 @@ def idna_encode(name): except idna.core.IDNAError: return None + # Don't send IPv6 addresses through the IDNA encoder. + if ':' in name: + return name + name = idna_encode(name) if name is None: return None @@ -276,7 +282,7 @@ def recv(self, *args, **kwargs): return b'' else: raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: + except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return b'' else: @@ -286,6 +292,10 @@ def recv(self, *args, **kwargs): raise timeout('The read operation timed out') else: return self.recv(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) else: return data @@ -297,7 +307,7 @@ def recv_into(self, *args, **kwargs): return 0 else: raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError as e: + except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return 0 else: @@ -308,6 +318,10 @@ def recv_into(self, *args, **kwargs): else: return self.recv_into(*args, **kwargs) + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + def settimeout(self, timeout): return self.socket.settimeout(timeout) @@ -360,6 +374,9 @@ def getpeercert(self, binary_form=False): 'subjectAltName': get_subj_alt_name(x509) } + def version(self): + return self.connection.get_protocol_version_name() + def _reuse(self): self._makefile_refs += 1 @@ -432,7 +449,9 @@ def load_verify_locations(self, cafile=None, capath=None, cadata=None): def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.use_certificate_chain_file(certfile) if password is not None: - self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) + if not isinstance(password, six.binary_type): + password = password.encode('utf-8') + self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) def wrap_socket(self, sock, server_side=False, diff --git a/pipenv/vendor/urllib3/contrib/securetransport.py b/pipenv/vendor/urllib3/contrib/securetransport.py index 77cb59ed71..4dc4848416 100644 --- a/pipenv/vendor/urllib3/contrib/securetransport.py +++ b/pipenv/vendor/urllib3/contrib/securetransport.py @@ -23,6 +23,31 @@ urllib3.contrib.securetransport.inject_into_urllib3() Happy TLSing! + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. """ from __future__ import absolute_import @@ -86,35 +111,32 @@ # individual cipher suites. We need to do this because this is how # SecureTransport wants them. CIPHER_SUITES = [ - SecurityConst.TLS_AES_256_GCM_SHA384, - SecurityConst.TLS_CHACHA20_POLY1305_SHA256, - SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, - SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, - SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_AES_128_CCM_8_SHA256, + SecurityConst.TLS_AES_128_CCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, @@ -122,9 +144,10 @@ ] # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of -# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. +# TLSv1 and a high of TLSv1.3. For everything else, we pin to that version. 
+# TLSv1 to 1.2 are supported on macOS 10.8+ and TLSv1.3 is macOS 10.13+ _protocol_to_min_max = { - ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocolMaxSupported), } if hasattr(ssl, "PROTOCOL_SSLv2"): @@ -147,14 +170,13 @@ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 ) -if hasattr(ssl, "PROTOCOL_TLS"): - _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] def inject_into_urllib3(): """ Monkey-patch urllib3 with SecureTransport-backed SSL-support. """ + util.SSLContext = SecureTransportContext util.ssl_.SSLContext = SecureTransportContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI @@ -166,6 +188,7 @@ def extract_from_urllib3(): """ Undo monkey-patching by :func:`inject_into_urllib3`. """ + util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI @@ -458,7 +481,14 @@ def handshake(self, # Set the minimum and maximum TLS versions. result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) + + # TLS 1.3 isn't necessarily enabled by the OS + # so we have to detect when we error out and try + # setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported + # was added in macOS 10.13 along with kTLSProtocol13. result = Security.SSLSetProtocolVersionMax(self.context, max_version) + if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported: + result = Security.SSLSetProtocolVersionMax(self.context, SecurityConst.kTLSProtocol12) _assert_no_error(result) # If there's a trust DB, we need to use it. We do that by telling @@ -667,6 +697,25 @@ def getpeercert(self, binary_form=False): return der_bytes + def version(self): + protocol = Security.SSLProtocol() + result = Security.SSLGetNegotiatedProtocolVersion(self.context, ctypes.byref(protocol)) + _assert_no_error(result) + if protocol.value == SecurityConst.kTLSProtocol13: + return 'TLSv1.3' + elif protocol.value == SecurityConst.kTLSProtocol12: + return 'TLSv1.2' + elif protocol.value == SecurityConst.kTLSProtocol11: + return 'TLSv1.1' + elif protocol.value == SecurityConst.kTLSProtocol1: + return 'TLSv1' + elif protocol.value == SecurityConst.kSSLProtocol3: + return 'SSLv3' + elif protocol.value == SecurityConst.kSSLProtocol2: + return 'SSLv2' + else: + raise ssl.SSLError('Unknown TLS version: %r' % protocol) + def _reuse(self): self._makefile_refs += 1 diff --git a/pipenv/vendor/urllib3/contrib/socks.py b/pipenv/vendor/urllib3/contrib/socks.py index 811e312ec8..636d261fb0 100644 --- a/pipenv/vendor/urllib3/contrib/socks.py +++ b/pipenv/vendor/urllib3/contrib/socks.py @@ -1,25 +1,38 @@ # -*- coding: utf-8 -*- """ This module contains provisional support for SOCKS proxies from within -urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and +urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and SOCKS5. To enable its functionality, either install PySocks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. 
It also supports the following SOCKS features: -- SOCKS4 -- SOCKS4a -- SOCKS5 +- SOCKS4A (``proxy_url='socks4a://...``) +- SOCKS4 (``proxy_url='socks4://...``) +- SOCKS5 with remote DNS (``proxy_url='socks5h://...``) +- SOCKS5 with local DNS (``proxy_url='socks5://...``) - Usernames and passwords for the SOCKS proxy -Known Limitations: + .. note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. + +SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 +supports IPv4, IPv6, and domain names. + +When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` +will be sent as the ``userid`` section of the SOCKS request:: + + proxy_url="socks4a://@proxy-host" + +When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion +of the ``proxy_url`` will be sent as the username/password to authenticate +with the proxy:: + + proxy_url="socks5h://:@proxy-host" -- Currently PySocks does not support contacting remote websites via literal - IPv6 addresses. Any such connection attempt will fail. You must use a domain - name. -- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any - such connection attempt will fail. """ from __future__ import absolute_import @@ -88,7 +101,7 @@ def _new_conn(self): **extra_kw ) - except SocketTimeout as e: + except SocketTimeout: raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) diff --git a/pipenv/vendor/urllib3/fields.py b/pipenv/vendor/urllib3/fields.py index 37fe64a3e8..6a9a5a7f56 100644 --- a/pipenv/vendor/urllib3/fields.py +++ b/pipenv/vendor/urllib3/fields.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import email.utils import mimetypes +import re from .packages import six @@ -19,57 +20,147 @@ def guess_content_type(filename, default='application/octet-stream'): return default -def format_header_param(name, value): +def format_header_param_rfc2231(name, value): """ - Helper function to format and quote a single header parameter. + Helper function to format and quote a single header parameter using the + strategy defined in RFC 2231. Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2231, as - suggested by RFC 2388 Section 4.4. + non-ASCII values, like file names. This follows RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: - The value of the parameter, provided as a unicode string. + The value of the parameter, provided as ``bytes`` or `str``. + :ret: + An RFC-2231-formatted unicode string. 
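A usage sketch for the schemes documented above; the proxy address and credentials are placeholders, and PySocks must be installed. As the note says, `socks5h://` resolves DNS on the proxy rather than locally:

```python
from urllib3.contrib.socks import SOCKSProxyManager

# Credentials in the URL are sent as SOCKS5 username/password auth.
proxy = SOCKSProxyManager('socks5h://user:pass@proxy-host:1080/')
response = proxy.request('GET', 'http://example.com/')
print(response.status)
```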
""" + if isinstance(value, six.binary_type): + value = value.decode("utf-8") + if not any(ch in value for ch in '"\\\r\n'): - result = '%s="%s"' % (name, value) + result = u'%s="%s"' % (name, value) try: result.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result - if not six.PY3 and isinstance(value, six.text_type): # Python 2: + + if not six.PY3: # Python 2: value = value.encode('utf-8') + + # encode_rfc2231 accepts an encoded string and returns an ascii-encoded + # string in Python 2 but accepts and returns unicode strings in Python 3 value = email.utils.encode_rfc2231(value, 'utf-8') value = '%s*=%s' % (name, value) + + if not six.PY3: # Python 2: + value = value.decode('utf-8') + return value +_HTML5_REPLACEMENTS = { + u"\u0022": u"%22", + # Replace "\" with "\\". + u"\u005C": u"\u005C\u005C", + u"\u005C": u"\u005C\u005C", +} + +# All control characters from 0x00 to 0x1F *except* 0x1B. +_HTML5_REPLACEMENTS.update({ + six.unichr(cc): u"%{:02X}".format(cc) + for cc + in range(0x00, 0x1F+1) + if cc not in (0x1B,) +}) + + +def _replace_multiple(value, needles_and_replacements): + + def replacer(match): + return needles_and_replacements[match.group(0)] + + pattern = re.compile( + r"|".join([ + re.escape(needle) for needle in needles_and_replacements.keys() + ]) + ) + + result = pattern.sub(replacer, value) + + return result + + +def format_header_param_html5(name, value): + """ + Helper function to format and quote a single header parameter using the + HTML5 strategy. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows the `HTML5 Working Draft + Section 4.10.22.7`_ and matches the behavior of curl and modern browsers. + + .. _HTML5 Working Draft Section 4.10.22.7: + https://w3c.github.io/html/sec-forms.html#multipart-form-data + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as ``bytes`` or `str``. + :ret: + A unicode string, stripped of troublesome characters. + """ + if isinstance(value, six.binary_type): + value = value.decode("utf-8") + + value = _replace_multiple(value, _HTML5_REPLACEMENTS) + + return u'%s="%s"' % (name, value) + + +# For backwards-compatibility. +format_header_param = format_header_param_html5 + + class RequestField(object): """ A data container for request body parameters. :param name: - The name of this request field. + The name of this request field. Must be unicode. :param data: The data/value body. :param filename: - An optional filename of the request field. + An optional filename of the request field. Must be unicode. :param headers: An optional dict-like object of headers to initially use for the field. + :param header_formatter: + An optional callable that is used to encode and format the headers. By + default, this is :func:`format_header_param_html5`. """ - def __init__(self, name, data, filename=None, headers=None): + def __init__( + self, + name, + data, + filename=None, + headers=None, + header_formatter=format_header_param_html5): self._name = name self._filename = filename self.data = data self.headers = {} if headers: self.headers = dict(headers) + self.header_formatter = header_formatter @classmethod - def from_tuples(cls, fieldname, value): + def from_tuples( + cls, + fieldname, + value, + header_formatter=format_header_param_html5): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. 
@@ -97,21 +188,24 @@ def from_tuples(cls, fieldname, value): content_type = None data = value - request_param = cls(fieldname, data, filename=filename) + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter) request_param.make_multipart(content_type=content_type) return request_param def _render_part(self, name, value): """ - Overridable helper function to format a single header parameter. + Overridable helper function to format a single header parameter. By + default, this calls ``self.header_formatter``. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """ - return format_header_param(name, value) + + return self.header_formatter(name, value) def _render_parts(self, header_parts): """ @@ -133,7 +227,7 @@ def _render_parts(self, header_parts): if value is not None: parts.append(self._render_part(name, value)) - return '; '.join(parts) + return u'; '.join(parts) def render_headers(self): """ @@ -144,15 +238,15 @@ def render_headers(self): sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] for sort_key in sort_keys: if self.headers.get(sort_key, False): - lines.append('%s: %s' % (sort_key, self.headers[sort_key])) + lines.append(u'%s: %s' % (sort_key, self.headers[sort_key])) for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: - lines.append('%s: %s' % (header_name, header_value)) + lines.append(u'%s: %s' % (header_name, header_value)) - lines.append('\r\n') - return '\r\n'.join(lines) + lines.append(u'\r\n') + return u'\r\n'.join(lines) def make_multipart(self, content_disposition=None, content_type=None, content_location=None): @@ -168,10 +262,10 @@ def make_multipart(self, content_disposition=None, content_type=None, The 'Content-Location' of the request body. """ - self.headers['Content-Disposition'] = content_disposition or 'form-data' - self.headers['Content-Disposition'] += '; '.join([ - '', self._render_parts( - (('name', self._name), ('filename', self._filename)) + self.headers['Content-Disposition'] = content_disposition or u'form-data' + self.headers['Content-Disposition'] += u'; '.join([ + u'', self._render_parts( + ((u'name', self._name), (u'filename', self._filename)) ) ]) self.headers['Content-Type'] = content_type diff --git a/pipenv/vendor/urllib3/packages/rfc3986/__init__.py b/pipenv/vendor/urllib3/packages/rfc3986/__init__.py new file mode 100644 index 0000000000..9d3c3bc92b --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +An implementation of semantics and validations described in RFC 3986. + +See http://rfc3986.readthedocs.io/ for detailed documentation. 
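The new `header_formatter` hook lets a single field opt back into the legacy behavior; a sketch with an illustrative filename:

```python
from urllib3.fields import RequestField, format_header_param_rfc2231

# Pin the legacy RFC 2231 formatting for this one field.
field = RequestField(
    'file', b'data', filename=u'r\u00e9sum\u00e9.txt',
    header_formatter=format_header_param_rfc2231,
)
field.make_multipart(content_type='text/plain')
print(field.render_headers())
```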
+ +:copyright: (c) 2014 Rackspace +:license: Apache v2.0, see LICENSE for details +""" + +from .api import iri_reference +from .api import IRIReference +from .api import is_valid_uri +from .api import normalize_uri +from .api import uri_reference +from .api import URIReference +from .api import urlparse +from .parseresult import ParseResult + +__title__ = 'rfc3986' +__author__ = 'Ian Stapleton Cordasco' +__author_email__ = 'graffatcolmingov@gmail.com' +__license__ = 'Apache v2.0' +__copyright__ = 'Copyright 2014 Rackspace' +__version__ = '1.3.1' + +__all__ = ( + 'ParseResult', + 'URIReference', + 'IRIReference', + 'is_valid_uri', + 'normalize_uri', + 'uri_reference', + 'iri_reference', + 'urlparse', + '__title__', + '__author__', + '__author_email__', + '__license__', + '__copyright__', + '__version__', +) diff --git a/pipenv/vendor/urllib3/packages/rfc3986/_mixin.py b/pipenv/vendor/urllib3/packages/rfc3986/_mixin.py new file mode 100644 index 0000000000..543925cdbc --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/_mixin.py @@ -0,0 +1,353 @@ +"""Module containing the implementation of the URIMixin class.""" +import warnings + +from . import exceptions as exc +from . import misc +from . import normalizers +from . import validators + + +class URIMixin(object): + """Mixin with all shared methods for URIs and IRIs.""" + + __hash__ = tuple.__hash__ + + def authority_info(self): + """Return a dictionary with the ``userinfo``, ``host``, and ``port``. + + If the authority is not valid, it will raise a + :class:`~rfc3986.exceptions.InvalidAuthority` Exception. + + :returns: + ``{'userinfo': 'username:password', 'host': 'www.example.com', + 'port': '80'}`` + :rtype: dict + :raises rfc3986.exceptions.InvalidAuthority: + If the authority is not ``None`` and can not be parsed. + """ + if not self.authority: + return {'userinfo': None, 'host': None, 'port': None} + + match = self._match_subauthority() + + if match is None: + # In this case, we have an authority that was parsed from the URI + # Reference, but it cannot be further parsed by our + # misc.SUBAUTHORITY_MATCHER. In this case it must not be a valid + # authority. + raise exc.InvalidAuthority(self.authority.encode(self.encoding)) + + # We had a match, now let's ensure that it is actually a valid host + # address if it is IPv4 + matches = match.groupdict() + host = matches.get('host') + + if (host and misc.IPv4_MATCHER.match(host) and not + validators.valid_ipv4_host_address(host)): + # If we have a host, it appears to be IPv4 and it does not have + # valid bytes, it is an InvalidAuthority. + raise exc.InvalidAuthority(self.authority.encode(self.encoding)) + + return matches + + def _match_subauthority(self): + return misc.SUBAUTHORITY_MATCHER.match(self.authority) + + @property + def host(self): + """If present, a string representing the host.""" + try: + authority = self.authority_info() + except exc.InvalidAuthority: + return None + return authority['host'] + + @property + def port(self): + """If present, the port extracted from the authority.""" + try: + authority = self.authority_info() + except exc.InvalidAuthority: + return None + return authority['port'] + + @property + def userinfo(self): + """If present, the userinfo extracted from the authority.""" + try: + authority = self.authority_info() + except exc.InvalidAuthority: + return None + return authority['userinfo'] + + def is_absolute(self): + """Determine if this URI Reference is an absolute URI. + + See http://tools.ietf.org/html/rfc3986#section-4.3 for explanation. 
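A usage sketch of the accessors defined above, via the vendored package path (the standalone `rfc3986` distribution exposes the same API):

```python
from urllib3.packages import rfc3986

ref = rfc3986.uri_reference('https://user:pw@example.com:8080/p?q#f')
print(ref.host)      # 'example.com'
print(ref.port)      # '8080' (a string)
print(ref.userinfo)  # 'user:pw'
```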
+ + :returns: ``True`` if it is an absolute URI, ``False`` otherwise. + :rtype: bool + """ + return bool(misc.ABSOLUTE_URI_MATCHER.match(self.unsplit())) + + def is_valid(self, **kwargs): + """Determine if the URI is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param bool require_scheme: Set to ``True`` if you wish to require the + presence of the scheme component. + :param bool require_authority: Set to ``True`` if you wish to require + the presence of the authority component. + :param bool require_path: Set to ``True`` if you wish to require the + presence of the path component. + :param bool require_query: Set to ``True`` if you wish to require the + presence of the query component. + :param bool require_fragment: Set to ``True`` if you wish to require + the presence of the fragment component. + :returns: ``True`` if the URI is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn("Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning) + validators = [ + (self.scheme_is_valid, kwargs.get('require_scheme', False)), + (self.authority_is_valid, kwargs.get('require_authority', False)), + (self.path_is_valid, kwargs.get('require_path', False)), + (self.query_is_valid, kwargs.get('require_query', False)), + (self.fragment_is_valid, kwargs.get('require_fragment', False)), + ] + return all(v(r) for v, r in validators) + + def authority_is_valid(self, require=False): + """Determine if the authority component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param bool require: + Set to ``True`` to require the presence of this component. + :returns: + ``True`` if the authority is valid. ``False`` otherwise. + :rtype: + bool + """ + warnings.warn("Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning) + try: + self.authority_info() + except exc.InvalidAuthority: + return False + + return validators.authority_is_valid( + self.authority, + host=self.host, + require=require, + ) + + def scheme_is_valid(self, require=False): + """Determine if the scheme component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the scheme is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn("Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning) + return validators.scheme_is_valid(self.scheme, require) + + def path_is_valid(self, require=False): + """Determine if the path component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the path is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn("Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning) + return validators.path_is_valid(self.path, require) + + def query_is_valid(self, require=False): + """Determine if the query component is valid. + + .. deprecated:: 1.1.0 + + Use the :class:`~rfc3986.validators.Validator` object instead. + + :param str require: Set to ``True`` to require the presence of this + component. 
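Each deprecation notice above points at the `Validator` replacement; a minimal sketch of that API:

```python
from urllib3.packages.rfc3986 import uri_reference, validators

v = validators.Validator().require_presence_of('scheme', 'host')
v.validate(uri_reference('https://example.com'))  # passes silently
# v.validate(uri_reference('//example.com'))  # would raise MissingComponentError
```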
+ :returns: ``True`` if the query is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn("Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning) + return validators.query_is_valid(self.query, require) + + def fragment_is_valid(self, require=False): + """Determine if the fragment component is valid. + + .. deprecated:: 1.1.0 + + Use the Validator object instead. + + :param str require: Set to ``True`` to require the presence of this + component. + :returns: ``True`` if the fragment is valid. ``False`` otherwise. + :rtype: bool + """ + warnings.warn("Please use rfc3986.validators.Validator instead. " + "This method will be eventually removed.", + DeprecationWarning) + return validators.fragment_is_valid(self.fragment, require) + + def normalized_equality(self, other_ref): + """Compare this URIReference to another URIReference. + + :param URIReference other_ref: (required), The reference with which + we're comparing. + :returns: ``True`` if the references are equal, ``False`` otherwise. + :rtype: bool + """ + return tuple(self.normalize()) == tuple(other_ref.normalize()) + + def resolve_with(self, base_uri, strict=False): + """Use an absolute URI Reference to resolve this relative reference. + + Assuming this is a relative reference that you would like to resolve, + use the provided base URI to resolve it. + + See http://tools.ietf.org/html/rfc3986#section-5 for more information. + + :param base_uri: Either a string or URIReference. It must be an + absolute URI or it will raise an exception. + :returns: A new URIReference which is the result of resolving this + reference using ``base_uri``. + :rtype: :class:`URIReference` + :raises rfc3986.exceptions.ResolutionError: + If the ``base_uri`` is not an absolute URI. + """ + if not isinstance(base_uri, URIMixin): + base_uri = type(self).from_string(base_uri) + + if not base_uri.is_absolute(): + raise exc.ResolutionError(base_uri) + + # This is optional per + # http://tools.ietf.org/html/rfc3986#section-5.2.1 + base_uri = base_uri.normalize() + + # The reference we're resolving + resolving = self + + if not strict and resolving.scheme == base_uri.scheme: + resolving = resolving.copy_with(scheme=None) + + # http://tools.ietf.org/html/rfc3986#page-32 + if resolving.scheme is not None: + target = resolving.copy_with( + path=normalizers.normalize_path(resolving.path) + ) + else: + if resolving.authority is not None: + target = resolving.copy_with( + scheme=base_uri.scheme, + path=normalizers.normalize_path(resolving.path) + ) + else: + if resolving.path is None: + if resolving.query is not None: + query = resolving.query + else: + query = base_uri.query + target = resolving.copy_with( + scheme=base_uri.scheme, + authority=base_uri.authority, + path=base_uri.path, + query=query + ) + else: + if resolving.path.startswith('/'): + path = normalizers.normalize_path(resolving.path) + else: + path = normalizers.normalize_path( + misc.merge_paths(base_uri, resolving.path) + ) + target = resolving.copy_with( + scheme=base_uri.scheme, + authority=base_uri.authority, + path=path, + query=resolving.query + ) + return target + + def unsplit(self): + """Create a URI string from the components. + + :returns: The URI Reference reconstituted as a string. 
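A quick sketch of `resolve_with` above and the `copy_with`/`unsplit` pair that follows:

```python
from urllib3.packages.rfc3986 import uri_reference

# Relative-reference resolution against an absolute base.
base = 'http://example.com/a/b/c'
print(uri_reference('../d').resolve_with(base, strict=True).unsplit())
# http://example.com/a/d

# copy_with keeps unspecified components as-is.
ref = uri_reference('http://example.com/path?q=1')
print(ref.copy_with(scheme='https', fragment='top').unsplit())
# https://example.com/path?q=1#top
```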
+        :rtype: str
+        """
+        # See http://tools.ietf.org/html/rfc3986#section-5.3
+        result_list = []
+        if self.scheme:
+            result_list.extend([self.scheme, ':'])
+        if self.authority:
+            result_list.extend(['//', self.authority])
+        if self.path:
+            result_list.append(self.path)
+        if self.query is not None:
+            result_list.extend(['?', self.query])
+        if self.fragment is not None:
+            result_list.extend(['#', self.fragment])
+        return ''.join(result_list)
+
+    def copy_with(self, scheme=misc.UseExisting, authority=misc.UseExisting,
+                  path=misc.UseExisting, query=misc.UseExisting,
+                  fragment=misc.UseExisting):
+        """Create a copy of this reference with the new components.
+
+        :param str scheme:
+            (optional) The scheme to use for the new reference.
+        :param str authority:
+            (optional) The authority to use for the new reference.
+        :param str path:
+            (optional) The path to use for the new reference.
+        :param str query:
+            (optional) The query to use for the new reference.
+        :param str fragment:
+            (optional) The fragment to use for the new reference.
+        :returns:
+            New URIReference with provided components.
+        :rtype:
+            URIReference
+        """
+        attributes = {
+            'scheme': scheme,
+            'authority': authority,
+            'path': path,
+            'query': query,
+            'fragment': fragment,
+        }
+        for key, value in list(attributes.items()):
+            if value is misc.UseExisting:
+                del attributes[key]
+        uri = self._replace(**attributes)
+        uri.encoding = self.encoding
+        return uri
diff --git a/pipenv/vendor/urllib3/packages/rfc3986/abnf_regexp.py b/pipenv/vendor/urllib3/packages/rfc3986/abnf_regexp.py
new file mode 100644
index 0000000000..24c9c3d00a
--- /dev/null
+++ b/pipenv/vendor/urllib3/packages/rfc3986/abnf_regexp.py
@@ -0,0 +1,267 @@
+# -*- coding: utf-8 -*-
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for the regular expressions crafted from ABNF."""
+
+import sys
+
+# https://tools.ietf.org/html/rfc3986#page-13
+GEN_DELIMS = GENERIC_DELIMITERS = ":/?#[]@"
+GENERIC_DELIMITERS_SET = set(GENERIC_DELIMITERS)
+# https://tools.ietf.org/html/rfc3986#page-13
+SUB_DELIMS = SUB_DELIMITERS = "!$&'()*+,;="
+SUB_DELIMITERS_SET = set(SUB_DELIMITERS)
+# Escape the '*' for use in regular expressions
+SUB_DELIMITERS_RE = r"!$&'()\*+,;="
+RESERVED_CHARS_SET = GENERIC_DELIMITERS_SET.union(SUB_DELIMITERS_SET)
+ALPHA = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
+DIGIT = '0123456789'
+# https://tools.ietf.org/html/rfc3986#section-2.3
+UNRESERVED = UNRESERVED_CHARS = ALPHA + DIGIT + r'._~-'
+UNRESERVED_CHARS_SET = set(UNRESERVED_CHARS)
+NON_PCT_ENCODED_SET = RESERVED_CHARS_SET.union(UNRESERVED_CHARS_SET)
+# We need to escape the '-' in this case:
+UNRESERVED_RE = r'A-Za-z0-9._~\-'
+
+# Percent encoded character values
+PERCENT_ENCODED = PCT_ENCODED = '%[A-Fa-f0-9]{2}'
+PCHAR = '([' + UNRESERVED_RE + SUB_DELIMITERS_RE + ':@]|%s)' % PCT_ENCODED
+
+# NOTE(sigmavirus24): We're going to use more strict regular expressions
+# than appear in Appendix B for scheme. This will prevent over-eager
+# consuming of items that aren't schemes.
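+# For example, with the loose Appendix B expression, parsing
+# '127.0.0.1:5000' would treat '127.0.0.1' as a scheme. SCHEME_RE below
+# requires a leading letter, so re.match(SCHEME_RE, '127.0.0.1') returns
+# None while re.match(SCHEME_RE, 'https') matches.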
+SCHEME_RE = '[a-zA-Z][a-zA-Z0-9+.-]*'
+_AUTHORITY_RE = '[^/?#]*'
+_PATH_RE = '[^?#]*'
+_QUERY_RE = '[^#]*'
+_FRAGMENT_RE = '.*'
+
+# Extracted from http://tools.ietf.org/html/rfc3986#appendix-B
+COMPONENT_PATTERN_DICT = {
+    'scheme': SCHEME_RE,
+    'authority': _AUTHORITY_RE,
+    'path': _PATH_RE,
+    'query': _QUERY_RE,
+    'fragment': _FRAGMENT_RE,
+}
+
+# See http://tools.ietf.org/html/rfc3986#appendix-B
+# In this case, we name each of the important matches so we can use
+# SRE_Match#groupdict to parse the values out if we so choose. This is also
+# modified to ignore other matches that are not important to the parsing of
+# the reference so we can also simply use SRE_Match#groups.
+URL_PARSING_RE = (
+    r'(?:(?P<scheme>{scheme}):)?(?://(?P<authority>{authority}))?'
+    r'(?P<path>{path})(?:\?(?P<query>{query}))?'
+    r'(?:#(?P<fragment>{fragment}))?'
+).format(**COMPONENT_PATTERN_DICT)
+
+
+# #########################
+# Authority Matcher Section
+# #########################
+
+# Host patterns, see: http://tools.ietf.org/html/rfc3986#section-3.2.2
+# The pattern for a regular name, e.g., www.google.com, api.github.com
+REGULAR_NAME_RE = REG_NAME = '((?:{0}|[{1}])*)'.format(
+    '%[0-9A-Fa-f]{2}', SUB_DELIMITERS_RE + UNRESERVED_RE
+)
+# The pattern for an IPv4 address, e.g., 192.168.255.255, 127.0.0.1,
+IPv4_RE = r'([0-9]{1,3}\.){3}[0-9]{1,3}'
+# Hexadecimal characters used in each piece of an IPv6 address
+HEXDIG_RE = '[0-9A-Fa-f]{1,4}'
+# Least-significant 32 bits of an IPv6 address
+LS32_RE = '({hex}:{hex}|{ipv4})'.format(hex=HEXDIG_RE, ipv4=IPv4_RE)
+# Substitutions into the following patterns for IPv6 patterns defined
+# http://tools.ietf.org/html/rfc3986#page-20
+_subs = {'hex': HEXDIG_RE, 'ls32': LS32_RE}
+
+# Below: h16 = hexdig, see: https://tools.ietf.org/html/rfc5234 for details
+# about ABNF (Augmented Backus-Naur Form) use in the comments
+variations = [
+    # 6( h16 ":" ) ls32
+    '(%(hex)s:){6}%(ls32)s' % _subs,
+    # "::" 5( h16 ":" ) ls32
+    '::(%(hex)s:){5}%(ls32)s' % _subs,
+    # [ h16 ] "::" 4( h16 ":" ) ls32
+    '(%(hex)s)?::(%(hex)s:){4}%(ls32)s' % _subs,
+    # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+    '((%(hex)s:)?%(hex)s)?::(%(hex)s:){3}%(ls32)s' % _subs,
+    # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+    '((%(hex)s:){0,2}%(hex)s)?::(%(hex)s:){2}%(ls32)s' % _subs,
+    # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+    '((%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s' % _subs,
+    # [ *4( h16 ":" ) h16 ] "::" ls32
+    '((%(hex)s:){0,4}%(hex)s)?::%(ls32)s' % _subs,
+    # [ *5( h16 ":" ) h16 ] "::" h16
+    '((%(hex)s:){0,5}%(hex)s)?::%(hex)s' % _subs,
+    # [ *6( h16 ":" ) h16 ] "::"
+    '((%(hex)s:){0,6}%(hex)s)?::' % _subs,
+]
+
+IPv6_RE = '(({0})|({1})|({2})|({3})|({4})|({5})|({6})|({7})|({8}))'.format(
+    *variations
+)
+
+IPv_FUTURE_RE = r'v[0-9A-Fa-f]+\.[%s]+' % (
+    UNRESERVED_RE + SUB_DELIMITERS_RE + ':'
+)
+
+# RFC 6874 Zone ID ABNF
+ZONE_ID = '(?:[' + UNRESERVED_RE + ']|' + PCT_ENCODED + ')+'
+
+IPv6_ADDRZ_RFC4007_RE = IPv6_RE + '(?:(?:%25|%)' + ZONE_ID + ')?'
+IPv6_ADDRZ_RE = IPv6_RE + '(?:%25' + ZONE_ID + ')?'
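+
+# For example, a link-local address with a zone ID can appear as
+# 'fe80::1%25eth0' (RFC 6874, '%' percent-encoded as '%25') or as
+# 'fe80::1%eth0' (RFC 4007, bare '%'); IPv6_ADDRZ_RE accepts only the
+# former, while IPv6_ADDRZ_RFC4007_RE accepts both.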
+ +IP_LITERAL_RE = r'\[({0}|{1})\]'.format( + IPv6_ADDRZ_RFC4007_RE, + IPv_FUTURE_RE, +) + +# Pattern for matching the host piece of the authority +HOST_RE = HOST_PATTERN = '({0}|{1}|{2})'.format( + REG_NAME, + IPv4_RE, + IP_LITERAL_RE, +) +USERINFO_RE = '^([' + UNRESERVED_RE + SUB_DELIMITERS_RE + ':]|%s)+' % ( + PCT_ENCODED +) +PORT_RE = '[0-9]{1,5}' + +# #################### +# Path Matcher Section +# #################### + +# See http://tools.ietf.org/html/rfc3986#section-3.3 for more information +# about the path patterns defined below. +segments = { + 'segment': PCHAR + '*', + # Non-zero length segment + 'segment-nz': PCHAR + '+', + # Non-zero length segment without ":" + 'segment-nz-nc': PCHAR.replace(':', '') + '+' +} + +# Path types taken from Section 3.3 (linked above) +PATH_EMPTY = '^$' +PATH_ROOTLESS = '%(segment-nz)s(/%(segment)s)*' % segments +PATH_NOSCHEME = '%(segment-nz-nc)s(/%(segment)s)*' % segments +PATH_ABSOLUTE = '/(%s)?' % PATH_ROOTLESS +PATH_ABEMPTY = '(/%(segment)s)*' % segments +PATH_RE = '^(%s|%s|%s|%s|%s)$' % ( + PATH_ABEMPTY, PATH_ABSOLUTE, PATH_NOSCHEME, PATH_ROOTLESS, PATH_EMPTY +) + +FRAGMENT_RE = QUERY_RE = ( + '^([/?:@' + UNRESERVED_RE + SUB_DELIMITERS_RE + ']|%s)*$' % PCT_ENCODED +) + +# ########################## +# Relative reference matcher +# ########################## + +# See http://tools.ietf.org/html/rfc3986#section-4.2 for details +RELATIVE_PART_RE = '(//%s%s|%s|%s|%s)' % ( + COMPONENT_PATTERN_DICT['authority'], + PATH_ABEMPTY, + PATH_ABSOLUTE, + PATH_NOSCHEME, + PATH_EMPTY, +) + +# See http://tools.ietf.org/html/rfc3986#section-3 for definition +HIER_PART_RE = '(//%s%s|%s|%s|%s)' % ( + COMPONENT_PATTERN_DICT['authority'], + PATH_ABEMPTY, + PATH_ABSOLUTE, + PATH_ROOTLESS, + PATH_EMPTY, +) + +# ############### +# IRIs / RFC 3987 +# ############### + +# Only wide-unicode gets the high-ranges of UCSCHAR +if sys.maxunicode > 0xFFFF: # pragma: no cover + IPRIVATE = u'\uE000-\uF8FF\U000F0000-\U000FFFFD\U00100000-\U0010FFFD' + UCSCHAR_RE = ( + u'\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF' + u'\U00010000-\U0001FFFD\U00020000-\U0002FFFD' + u'\U00030000-\U0003FFFD\U00040000-\U0004FFFD' + u'\U00050000-\U0005FFFD\U00060000-\U0006FFFD' + u'\U00070000-\U0007FFFD\U00080000-\U0008FFFD' + u'\U00090000-\U0009FFFD\U000A0000-\U000AFFFD' + u'\U000B0000-\U000BFFFD\U000C0000-\U000CFFFD' + u'\U000D0000-\U000DFFFD\U000E1000-\U000EFFFD' + ) +else: # pragma: no cover + IPRIVATE = u'\uE000-\uF8FF' + UCSCHAR_RE = ( + u'\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF' + ) + +IUNRESERVED_RE = u'A-Za-z0-9\\._~\\-' + UCSCHAR_RE +IPCHAR = u'([' + IUNRESERVED_RE + SUB_DELIMITERS_RE + u':@]|%s)' % PCT_ENCODED + +isegments = { + 'isegment': IPCHAR + u'*', + # Non-zero length segment + 'isegment-nz': IPCHAR + u'+', + # Non-zero length segment without ":" + 'isegment-nz-nc': IPCHAR.replace(':', '') + u'+' +} + +IPATH_ROOTLESS = u'%(isegment-nz)s(/%(isegment)s)*' % isegments +IPATH_NOSCHEME = u'%(isegment-nz-nc)s(/%(isegment)s)*' % isegments +IPATH_ABSOLUTE = u'/(?:%s)?' 
% IPATH_ROOTLESS +IPATH_ABEMPTY = u'(?:/%(isegment)s)*' % isegments +IPATH_RE = u'^(?:%s|%s|%s|%s|%s)$' % ( + IPATH_ABEMPTY, IPATH_ABSOLUTE, IPATH_NOSCHEME, IPATH_ROOTLESS, PATH_EMPTY +) + +IREGULAR_NAME_RE = IREG_NAME = u'(?:{0}|[{1}])*'.format( + u'%[0-9A-Fa-f]{2}', SUB_DELIMITERS_RE + IUNRESERVED_RE +) + +IHOST_RE = IHOST_PATTERN = u'({0}|{1}|{2})'.format( + IREG_NAME, + IPv4_RE, + IP_LITERAL_RE, +) + +IUSERINFO_RE = u'^(?:[' + IUNRESERVED_RE + SUB_DELIMITERS_RE + u':]|%s)+' % ( + PCT_ENCODED +) + +IFRAGMENT_RE = (u'^(?:[/?:@' + IUNRESERVED_RE + SUB_DELIMITERS_RE + + u']|%s)*$' % PCT_ENCODED) +IQUERY_RE = (u'^(?:[/?:@' + IUNRESERVED_RE + SUB_DELIMITERS_RE + + IPRIVATE + u']|%s)*$' % PCT_ENCODED) + +IRELATIVE_PART_RE = u'(//%s%s|%s|%s|%s)' % ( + COMPONENT_PATTERN_DICT['authority'], + IPATH_ABEMPTY, + IPATH_ABSOLUTE, + IPATH_NOSCHEME, + PATH_EMPTY, +) + +IHIER_PART_RE = u'(//%s%s|%s|%s|%s)' % ( + COMPONENT_PATTERN_DICT['authority'], + IPATH_ABEMPTY, + IPATH_ABSOLUTE, + IPATH_ROOTLESS, + PATH_EMPTY, +) diff --git a/pipenv/vendor/urllib3/packages/rfc3986/api.py b/pipenv/vendor/urllib3/packages/rfc3986/api.py new file mode 100644 index 0000000000..ddc4a1cd28 --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/api.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Module containing the simple and functional API for rfc3986. + +This module defines functions and provides access to the public attributes +and classes of rfc3986. +""" + +from .iri import IRIReference +from .parseresult import ParseResult +from .uri import URIReference + + +def uri_reference(uri, encoding='utf-8'): + """Parse a URI string into a URIReference. + + This is a convenience function. You could achieve the same end by using + ``URIReference.from_string(uri)``. + + :param str uri: The URI which needs to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: A parsed URI + :rtype: :class:`URIReference` + """ + return URIReference.from_string(uri, encoding) + + +def iri_reference(iri, encoding='utf-8'): + """Parse a IRI string into an IRIReference. + + This is a convenience function. You could achieve the same end by using + ``IRIReference.from_string(iri)``. + + :param str iri: The IRI which needs to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: A parsed IRI + :rtype: :class:`IRIReference` + """ + return IRIReference.from_string(iri, encoding) + + +def is_valid_uri(uri, encoding='utf-8', **kwargs): + """Determine if the URI given is valid. + + This is a convenience function. You could use either + ``uri_reference(uri).is_valid()`` or + ``URIReference.from_string(uri).is_valid()`` to achieve the same result. + + :param str uri: The URI to be validated. + :param str encoding: The encoding of the string provided + :param bool require_scheme: Set to ``True`` if you wish to require the + presence of the scheme component. 
+ :param bool require_authority: Set to ``True`` if you wish to require the + presence of the authority component. + :param bool require_path: Set to ``True`` if you wish to require the + presence of the path component. + :param bool require_query: Set to ``True`` if you wish to require the + presence of the query component. + :param bool require_fragment: Set to ``True`` if you wish to require the + presence of the fragment component. + :returns: ``True`` if the URI is valid, ``False`` otherwise. + :rtype: bool + """ + return URIReference.from_string(uri, encoding).is_valid(**kwargs) + + +def normalize_uri(uri, encoding='utf-8'): + """Normalize the given URI. + + This is a convenience function. You could use either + ``uri_reference(uri).normalize().unsplit()`` or + ``URIReference.from_string(uri).normalize().unsplit()`` instead. + + :param str uri: The URI to be normalized. + :param str encoding: The encoding of the string provided + :returns: The normalized URI. + :rtype: str + """ + normalized_reference = URIReference.from_string(uri, encoding).normalize() + return normalized_reference.unsplit() + + +def urlparse(uri, encoding='utf-8'): + """Parse a given URI and return a ParseResult. + + This is a partial replacement of the standard library's urlparse function. + + :param str uri: The URI to be parsed. + :param str encoding: The encoding of the string provided. + :returns: A parsed URI + :rtype: :class:`~rfc3986.parseresult.ParseResult` + """ + return ParseResult.from_string(uri, encoding, strict=False) diff --git a/pipenv/vendor/urllib3/packages/rfc3986/builder.py b/pipenv/vendor/urllib3/packages/rfc3986/builder.py new file mode 100644 index 0000000000..7934279995 --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/builder.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module containing the logic for the URIBuilder object.""" +from . import compat +from . import normalizers +from . import uri + + +class URIBuilder(object): + """Object to aid in building up a URI Reference from parts. + + .. note:: + + This object should be instantiated by the user, but it's recommended + that it is not provided with arguments. Instead, use the available + method to populate the fields. + + """ + + def __init__(self, scheme=None, userinfo=None, host=None, port=None, + path=None, query=None, fragment=None): + """Initialize our URI builder. 
+ + :param str scheme: + (optional) + :param str userinfo: + (optional) + :param str host: + (optional) + :param int port: + (optional) + :param str path: + (optional) + :param str query: + (optional) + :param str fragment: + (optional) + """ + self.scheme = scheme + self.userinfo = userinfo + self.host = host + self.port = port + self.path = path + self.query = query + self.fragment = fragment + + def __repr__(self): + """Provide a convenient view of our builder object.""" + formatstr = ('URIBuilder(scheme={b.scheme}, userinfo={b.userinfo}, ' + 'host={b.host}, port={b.port}, path={b.path}, ' + 'query={b.query}, fragment={b.fragment})') + return formatstr.format(b=self) + + def add_scheme(self, scheme): + """Add a scheme to our builder object. + + After normalizing, this will generate a new URIBuilder instance with + the specified scheme and all other attributes the same. + + .. code-block:: python + + >>> URIBuilder().add_scheme('HTTPS') + URIBuilder(scheme='https', userinfo=None, host=None, port=None, + path=None, query=None, fragment=None) + + """ + scheme = normalizers.normalize_scheme(scheme) + return URIBuilder( + scheme=scheme, + userinfo=self.userinfo, + host=self.host, + port=self.port, + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_credentials(self, username, password): + """Add credentials as the userinfo portion of the URI. + + .. code-block:: python + + >>> URIBuilder().add_credentials('root', 's3crete') + URIBuilder(scheme=None, userinfo='root:s3crete', host=None, + port=None, path=None, query=None, fragment=None) + + >>> URIBuilder().add_credentials('root', None) + URIBuilder(scheme=None, userinfo='root', host=None, + port=None, path=None, query=None, fragment=None) + """ + if username is None: + raise ValueError('Username cannot be None') + userinfo = normalizers.normalize_username(username) + + if password is not None: + userinfo = '{}:{}'.format( + userinfo, + normalizers.normalize_password(password), + ) + + return URIBuilder( + scheme=self.scheme, + userinfo=userinfo, + host=self.host, + port=self.port, + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_host(self, host): + """Add hostname to the URI. + + .. code-block:: python + + >>> URIBuilder().add_host('google.com') + URIBuilder(scheme=None, userinfo=None, host='google.com', + port=None, path=None, query=None, fragment=None) + + """ + return URIBuilder( + scheme=self.scheme, + userinfo=self.userinfo, + host=normalizers.normalize_host(host), + port=self.port, + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_port(self, port): + """Add port to the URI. + + .. code-block:: python + + >>> URIBuilder().add_port(80) + URIBuilder(scheme=None, userinfo=None, host=None, port='80', + path=None, query=None, fragment=None) + + >>> URIBuilder().add_port(443) + URIBuilder(scheme=None, userinfo=None, host=None, port='443', + path=None, query=None, fragment=None) + + """ + port_int = int(port) + if port_int < 0: + raise ValueError( + 'ports are not allowed to be negative. You provided {}'.format( + port_int, + ) + ) + if port_int > 65535: + raise ValueError( + 'ports are not allowed to be larger than 65535. ' + 'You provided {}'.format( + port_int, + ) + ) + + return URIBuilder( + scheme=self.scheme, + userinfo=self.userinfo, + host=self.host, + port='{}'.format(port_int), + path=self.path, + query=self.query, + fragment=self.fragment, + ) + + def add_path(self, path): + """Add a path to the URI. + + .. 
code-block:: python
+
+            >>> URIBuilder().add_path('sigmavirus24/rfc3986')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                    path='/sigmavirus24/rfc3986', query=None, fragment=None)
+
+            >>> URIBuilder().add_path('/checkout.php')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                    path='/checkout.php', query=None, fragment=None)
+
+        """
+        if not path.startswith('/'):
+            path = '/{}'.format(path)
+
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=normalizers.normalize_path(path),
+            query=self.query,
+            fragment=self.fragment,
+        )
+
+    def add_query_from(self, query_items):
+        """Generate and add a query from a dictionary or list of tuples.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_query_from({'a': 'b c'})
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                    path=None, query='a=b+c', fragment=None)
+
+            >>> URIBuilder().add_query_from([('a', 'b c')])
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                    path=None, query='a=b+c', fragment=None)
+
+        """
+        query = normalizers.normalize_query(compat.urlencode(query_items))
+
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=self.path,
+            query=query,
+            fragment=self.fragment,
+        )
+
+    def add_query(self, query):
+        """Add a pre-formatted query string to the URI.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_query('a=b&c=d')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                    path=None, query='a=b&c=d', fragment=None)
+
+        """
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=self.path,
+            query=normalizers.normalize_query(query),
+            fragment=self.fragment,
+        )
+
+    def add_fragment(self, fragment):
+        """Add a fragment to the URI.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_fragment('section-2.6.1')
+            URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+                    path=None, query=None, fragment='section-2.6.1')
+
+        """
+        return URIBuilder(
+            scheme=self.scheme,
+            userinfo=self.userinfo,
+            host=self.host,
+            port=self.port,
+            path=self.path,
+            query=self.query,
+            fragment=normalizers.normalize_fragment(fragment),
+        )
+
+    def finalize(self):
+        """Create a URIReference from our builder.
+
+        .. code-block:: python
+
+            >>> URIBuilder().add_scheme('https').add_host('github.com'
+            ... ).add_path('sigmavirus24/rfc3986').finalize().unsplit()
+            'https://github.com/sigmavirus24/rfc3986'
+
+            >>> URIBuilder().add_scheme('https').add_host('github.com'
+            ... ).add_path('sigmavirus24/rfc3986').add_credentials(
+            ... 'sigmavirus24', 'not-re@l').finalize().unsplit()
+            'https://sigmavirus24:not-re%40l@github.com/sigmavirus24/rfc3986'
+
+        """
+        return uri.URIReference(
+            self.scheme,
+            normalizers.normalize_authority(
+                (self.userinfo, self.host, self.port)
+            ),
+            self.path,
+            self.query,
+            self.fragment,
+        )
diff --git a/pipenv/vendor/urllib3/packages/rfc3986/compat.py b/pipenv/vendor/urllib3/packages/rfc3986/compat.py
new file mode 100644
index 0000000000..8968c38437
--- /dev/null
+++ b/pipenv/vendor/urllib3/packages/rfc3986/compat.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Compatibility module for Python 2 and 3 support.""" +import sys + +try: + from urllib.parse import quote as urlquote +except ImportError: # Python 2.x + from urllib import quote as urlquote + +try: + from urllib.parse import urlencode +except ImportError: # Python 2.x + from urllib import urlencode + +__all__ = ( + 'to_bytes', + 'to_str', + 'urlquote', + 'urlencode', +) + +PY3 = (3, 0) <= sys.version_info < (4, 0) +PY2 = (2, 6) <= sys.version_info < (2, 8) + + +if PY3: + unicode = str # Python 3.x + + +def to_str(b, encoding='utf-8'): + """Ensure that b is text in the specified encoding.""" + if hasattr(b, 'decode') and not isinstance(b, unicode): + b = b.decode(encoding) + return b + + +def to_bytes(s, encoding='utf-8'): + """Ensure that s is converted to bytes from the encoding.""" + if hasattr(s, 'encode') and not isinstance(s, bytes): + s = s.encode(encoding) + return s diff --git a/pipenv/vendor/urllib3/packages/rfc3986/exceptions.py b/pipenv/vendor/urllib3/packages/rfc3986/exceptions.py new file mode 100644 index 0000000000..da8ca7cb1f --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/exceptions.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +"""Exceptions module for rfc3986.""" + +from . import compat + + +class RFC3986Exception(Exception): + """Base class for all rfc3986 exception classes.""" + + pass + + +class InvalidAuthority(RFC3986Exception): + """Exception when the authority string is invalid.""" + + def __init__(self, authority): + """Initialize the exception with the invalid authority.""" + super(InvalidAuthority, self).__init__( + u"The authority ({0}) is not valid.".format( + compat.to_str(authority))) + + +class InvalidPort(RFC3986Exception): + """Exception when the port is invalid.""" + + def __init__(self, port): + """Initialize the exception with the invalid port.""" + super(InvalidPort, self).__init__( + 'The port ("{0}") is not valid.'.format(port)) + + +class ResolutionError(RFC3986Exception): + """Exception to indicate a failure to resolve a URI.""" + + def __init__(self, uri): + """Initialize the error with the failed URI.""" + super(ResolutionError, self).__init__( + "{0} is not an absolute URI.".format(uri.unsplit())) + + +class ValidationError(RFC3986Exception): + """Exception raised during Validation of a URI.""" + + pass + + +class MissingComponentError(ValidationError): + """Exception raised when a required component is missing.""" + + def __init__(self, uri, *component_names): + """Initialize the error with the missing component name.""" + verb = 'was' + if len(component_names) > 1: + verb = 'were' + + self.uri = uri + self.components = sorted(component_names) + components = ', '.join(self.components) + super(MissingComponentError, self).__init__( + "{} {} required but missing".format(components, verb), + uri, + self.components, + ) + + +class UnpermittedComponentError(ValidationError): + """Exception raised when a component has an unpermitted value.""" + + def __init__(self, component_name, component_value, allowed_values): + """Initialize the error with the unpermitted component.""" + super(UnpermittedComponentError, 
self).__init__( + "{} was required to be one of {!r} but was {!r}".format( + component_name, list(sorted(allowed_values)), component_value, + ), + component_name, + component_value, + allowed_values, + ) + self.component_name = component_name + self.component_value = component_value + self.allowed_values = allowed_values + + +class PasswordForbidden(ValidationError): + """Exception raised when a URL has a password in the userinfo section.""" + + def __init__(self, uri): + """Initialize the error with the URI that failed validation.""" + unsplit = getattr(uri, 'unsplit', lambda: uri) + super(PasswordForbidden, self).__init__( + '"{}" contained a password when validation forbade it'.format( + unsplit() + ) + ) + self.uri = uri + + +class InvalidComponentsError(ValidationError): + """Exception raised when one or more components are invalid.""" + + def __init__(self, uri, *component_names): + """Initialize the error with the invalid component name(s).""" + verb = 'was' + if len(component_names) > 1: + verb = 'were' + + self.uri = uri + self.components = sorted(component_names) + components = ', '.join(self.components) + super(InvalidComponentsError, self).__init__( + "{} {} found to be invalid".format(components, verb), + uri, + self.components, + ) + + +class MissingDependencyError(RFC3986Exception): + """Exception raised when an IRI is encoded without the 'idna' module.""" diff --git a/pipenv/vendor/urllib3/packages/rfc3986/iri.py b/pipenv/vendor/urllib3/packages/rfc3986/iri.py new file mode 100644 index 0000000000..9c01fe1cd0 --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/iri.py @@ -0,0 +1,147 @@ +"""Module containing the implementation of the IRIReference class.""" +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Copyright (c) 2015 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import namedtuple + +from . import compat +from . import exceptions +from . import misc +from . import normalizers +from . import uri + + +try: + import idna +except ImportError: # pragma: no cover + idna = None + + +class IRIReference(namedtuple('IRIReference', misc.URI_COMPONENTS), + uri.URIMixin): + """Immutable object representing a parsed IRI Reference. + + Can be encoded into an URIReference object via the procedure + specified in RFC 3987 Section 3.1 + + .. note:: + The IRI submodule is a new interface and may possibly change in + the future. Check for changes to the interface when upgrading. 
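+
+    For example, non-ASCII characters in the path, query, and fragment are
+    percent-encoded as their UTF-8 bytes at parse time, so
+    ``IRIReference.from_string(u'http://example.com/caf\u00e9')`` yields a
+    reference whose ``path`` is ``'/caf%C3%A9'``.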
+ """ + + slots = () + + def __new__(cls, scheme, authority, path, query, fragment, + encoding='utf-8'): + """Create a new IRIReference.""" + ref = super(IRIReference, cls).__new__( + cls, + scheme or None, + authority or None, + path or None, + query, + fragment) + ref.encoding = encoding + return ref + + def __eq__(self, other): + """Compare this reference to another.""" + other_ref = other + if isinstance(other, tuple): + other_ref = self.__class__(*other) + elif not isinstance(other, IRIReference): + try: + other_ref = self.__class__.from_string(other) + except TypeError: + raise TypeError( + 'Unable to compare {0}() to {1}()'.format( + type(self).__name__, type(other).__name__)) + + # See http://tools.ietf.org/html/rfc3986#section-6.2 + return tuple(self) == tuple(other_ref) + + def _match_subauthority(self): + return misc.ISUBAUTHORITY_MATCHER.match(self.authority) + + @classmethod + def from_string(cls, iri_string, encoding='utf-8'): + """Parse a IRI reference from the given unicode IRI string. + + :param str iri_string: Unicode IRI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :returns: :class:`IRIReference` or subclass thereof + """ + iri_string = compat.to_str(iri_string, encoding) + + split_iri = misc.IRI_MATCHER.match(iri_string).groupdict() + return cls( + split_iri['scheme'], split_iri['authority'], + normalizers.encode_component(split_iri['path'], encoding), + normalizers.encode_component(split_iri['query'], encoding), + normalizers.encode_component(split_iri['fragment'], encoding), + encoding, + ) + + def encode(self, idna_encoder=None): # noqa: C901 + """Encode an IRIReference into a URIReference instance. + + If the ``idna`` module is installed or the ``rfc3986[idna]`` + extra is used then unicode characters in the IRI host + component will be encoded with IDNA2008. + + :param idna_encoder: + Function that encodes each part of the host component + If not given will raise an exception if the IRI + contains a host component. + :rtype: uri.URIReference + :returns: A URI reference + """ + authority = self.authority + if authority: + if idna_encoder is None: + if idna is None: # pragma: no cover + raise exceptions.MissingDependencyError( + "Could not import the 'idna' module " + "and the IRI hostname requires encoding" + ) + + def idna_encoder(name): + if any(ord(c) > 128 for c in name): + try: + return idna.encode(name.lower(), + strict=True, + std3_rules=True) + except idna.IDNAError: + raise exceptions.InvalidAuthority(self.authority) + return name + + authority = "" + if self.host: + authority = ".".join([compat.to_str(idna_encoder(part)) + for part in self.host.split(".")]) + + if self.userinfo is not None: + authority = (normalizers.encode_component( + self.userinfo, self.encoding) + '@' + authority) + + if self.port is not None: + authority += ":" + str(self.port) + + return uri.URIReference(self.scheme, + authority, + path=self.path, + query=self.query, + fragment=self.fragment, + encoding=self.encoding) diff --git a/pipenv/vendor/urllib3/packages/rfc3986/misc.py b/pipenv/vendor/urllib3/packages/rfc3986/misc.py new file mode 100644 index 0000000000..00f9f3b94d --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/misc.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Module containing compiled regular expressions and constants.
+
+This module contains important constants, patterns, and compiled regular
+expressions for parsing and validating URIs and their components.
+"""
+
+import re
+
+from . import abnf_regexp
+
+# These are enumerated for the named tuple used as a superclass of
+# URIReference
+URI_COMPONENTS = ['scheme', 'authority', 'path', 'query', 'fragment']
+
+important_characters = {
+    'generic_delimiters': abnf_regexp.GENERIC_DELIMITERS,
+    'sub_delimiters': abnf_regexp.SUB_DELIMITERS,
+    # We need to escape the '*' in this case
+    're_sub_delimiters': abnf_regexp.SUB_DELIMITERS_RE,
+    'unreserved_chars': abnf_regexp.UNRESERVED_CHARS,
+    # We need to escape the '-' in this case:
+    're_unreserved': abnf_regexp.UNRESERVED_RE,
+}
+
+# For details about delimiters and reserved characters, see:
+# http://tools.ietf.org/html/rfc3986#section-2.2
+GENERIC_DELIMITERS = abnf_regexp.GENERIC_DELIMITERS_SET
+SUB_DELIMITERS = abnf_regexp.SUB_DELIMITERS_SET
+RESERVED_CHARS = abnf_regexp.RESERVED_CHARS_SET
+# For details about unreserved characters, see:
+# http://tools.ietf.org/html/rfc3986#section-2.3
+UNRESERVED_CHARS = abnf_regexp.UNRESERVED_CHARS_SET
+NON_PCT_ENCODED = abnf_regexp.NON_PCT_ENCODED_SET
+
+URI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE)
+
+SUBAUTHORITY_MATCHER = re.compile((
+    '^(?:(?P<userinfo>{0})@)?'  # userinfo
+    '(?P<host>{1})'  # host
+    ':?(?P<port>{2})?$'  # port
+    ).format(abnf_regexp.USERINFO_RE,
+             abnf_regexp.HOST_PATTERN,
+             abnf_regexp.PORT_RE))
+
+
+HOST_MATCHER = re.compile('^' + abnf_regexp.HOST_RE + '$')
+IPv4_MATCHER = re.compile('^' + abnf_regexp.IPv4_RE + '$')
+IPv6_MATCHER = re.compile(r'^\[' + abnf_regexp.IPv6_ADDRZ_RFC4007_RE + r'\]$')
+
+# Used by host validator
+IPv6_NO_RFC4007_MATCHER = re.compile(r'^\[%s\]$' % (
+    abnf_regexp.IPv6_ADDRZ_RE
+))
+
+# Matcher used to validate path components
+PATH_MATCHER = re.compile(abnf_regexp.PATH_RE)
+
+
+# ##################################
+# Query and Fragment Matcher Section
+# ##################################
+
+QUERY_MATCHER = re.compile(abnf_regexp.QUERY_RE)
+
+FRAGMENT_MATCHER = QUERY_MATCHER
+
+# Scheme validation, see: http://tools.ietf.org/html/rfc3986#section-3.1
+SCHEME_MATCHER = re.compile('^{0}$'.format(abnf_regexp.SCHEME_RE))
+
+RELATIVE_REF_MATCHER = re.compile(r'^%s(\?%s)?(#%s)?$' % (
+    abnf_regexp.RELATIVE_PART_RE,
+    abnf_regexp.QUERY_RE,
+    abnf_regexp.FRAGMENT_RE,
+))
+
+# See http://tools.ietf.org/html/rfc3986#section-4.3
+ABSOLUTE_URI_MATCHER = re.compile(r'^%s:%s(\?%s)?$' % (
+    abnf_regexp.COMPONENT_PATTERN_DICT['scheme'],
+    abnf_regexp.HIER_PART_RE,
+    abnf_regexp.QUERY_RE[1:-1],
+))
+
+# ###############
+# IRIs / RFC 3987
+# ###############
+
+IRI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE, re.UNICODE)
+
+ISUBAUTHORITY_MATCHER = re.compile((
+    u'^(?:(?P<userinfo>{0})@)?'  # iuserinfo
+    u'(?P<host>{1})'  # ihost
+    u':?(?P<port>{2})?$'  # port
+    ).format(abnf_regexp.IUSERINFO_RE,
+             abnf_regexp.IHOST_RE,
+             abnf_regexp.PORT_RE), re.UNICODE)
+
+
+IHOST_MATCHER = re.compile('^' + abnf_regexp.IHOST_RE + '$', re.UNICODE)
+
+IPATH_MATCHER = re.compile(abnf_regexp.IPATH_RE, re.UNICODE)
+
+IQUERY_MATCHER = re.compile(abnf_regexp.IQUERY_RE, re.UNICODE)
+
+IFRAGMENT_MATCHER = re.compile(abnf_regexp.IFRAGMENT_RE, re.UNICODE)
+
+
+RELATIVE_IRI_MATCHER = re.compile(u'^%s(?:\\?%s)?(?:%s)?$' % (
+    abnf_regexp.IRELATIVE_PART_RE,
+    abnf_regexp.IQUERY_RE,
+    abnf_regexp.IFRAGMENT_RE
+), re.UNICODE)
+
+ABSOLUTE_IRI_MATCHER = re.compile(u'^%s:%s(?:\\?%s)?$' % (
+    abnf_regexp.COMPONENT_PATTERN_DICT['scheme'],
+    abnf_regexp.IHIER_PART_RE,
+    abnf_regexp.IQUERY_RE[1:-1]
+), re.UNICODE)
+
+
+# Path merger as defined in http://tools.ietf.org/html/rfc3986#section-5.2.3
+def merge_paths(base_uri, relative_path):
+    """Merge a base URI's path with a relative URI's path."""
+    if base_uri.path is None and base_uri.authority is not None:
+        return '/' + relative_path
+    else:
+        path = base_uri.path or ''
+        index = path.rfind('/')
+        return path[:index] + '/' + relative_path
+
+
+UseExisting = object()
diff --git a/pipenv/vendor/urllib3/packages/rfc3986/normalizers.py b/pipenv/vendor/urllib3/packages/rfc3986/normalizers.py
new file mode 100644
index 0000000000..2eb1bb36f7
--- /dev/null
+++ b/pipenv/vendor/urllib3/packages/rfc3986/normalizers.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module with functions to normalize components."""
+import re
+
+from . import compat
+from . import misc
+
+
+def normalize_scheme(scheme):
+    """Normalize the scheme component."""
+    return scheme.lower()
+
+
+def normalize_authority(authority):
+    """Normalize an authority tuple to a string."""
+    userinfo, host, port = authority
+    result = ''
+    if userinfo:
+        result += normalize_percent_characters(userinfo) + '@'
+    if host:
+        result += normalize_host(host)
+    if port:
+        result += ':' + port
+    return result
+
+
+def normalize_username(username):
+    """Normalize a username to make it safe to include in userinfo."""
+    return compat.urlquote(username)
+
+
+def normalize_password(password):
+    """Normalize a password to make safe for userinfo."""
+    return compat.urlquote(password)
+
+
+def normalize_host(host):
+    """Normalize a host string."""
+    if misc.IPv6_MATCHER.match(host):
+        percent = host.find('%')
+        if percent != -1:
+            percent_25 = host.find('%25')
+
+            # Replace RFC 4007 IPv6 Zone ID delimiter '%' with '%25'
+            # from RFC 6874. If the host is '[<IPv6 addr>%25]' then we
+            # assume RFC 4007 and normalize to '[<IPv6 addr>%2525]'
+            if percent_25 == -1 or percent < percent_25 or \
+                    (percent == percent_25 and percent_25 == len(host) - 4):
+                host = host.replace('%', '%25', 1)
+
+            # Don't normalize the casing of the Zone ID
+            return host[:percent].lower() + host[percent:]
+
+    return host.lower()
+
+
+def normalize_path(path):
+    """Normalize the path string."""
+    if not path:
+        return path
+
+    path = normalize_percent_characters(path)
+    return remove_dot_segments(path)
+
+
+def normalize_query(query):
+    """Normalize the query string."""
+    if not query:
+        return query
+    return normalize_percent_characters(query)
+
+
+def normalize_fragment(fragment):
+    """Normalize the fragment string."""
+    if not fragment:
+        return fragment
+    return normalize_percent_characters(fragment)
+
+
+PERCENT_MATCHER = re.compile('%[A-Fa-f0-9]{2}')
+
+
+def normalize_percent_characters(s):
+    """All percent characters should be upper-cased.
+
+    For example, ``"%3afoo%DF%ab"`` should be turned into ``"%3Afoo%DF%AB"``.
+    """
+    matches = set(PERCENT_MATCHER.findall(s))
+    for m in matches:
+        if not m.isupper():
+            s = s.replace(m, m.upper())
+    return s
+
+
+def remove_dot_segments(s):
+    """Remove dot segments from the string.
+
+    See also Section 5.2.4 of :rfc:`3986`.
+    """
+    # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+    segments = s.split('/')  # Turn the path into a list of segments
+    output = []  # Initialize the variable to use to store output
+
+    for segment in segments:
+        # '.' is the current directory, so ignore it, it is superfluous
+        if segment == '.':
+            continue
+        # Anything other than '..', should be appended to the output
+        elif segment != '..':
+            output.append(segment)
+        # In this case segment == '..', if we can, we should pop the last
+        # element
+        elif output:
+            output.pop()
+
+    # If the path starts with '/' and the output is empty or the first string
+    # is non-empty
+    if s.startswith('/') and (not output or output[0]):
+        output.insert(0, '')
+
+    # If the path starts with '/.' or '/..' ensure we add one more empty
+    # string to add a trailing '/'
+    if s.endswith(('/.', '/..')):
+        output.append('')
+
+    return '/'.join(output)
+
+
+def encode_component(uri_component, encoding):
+    """Encode the specific component in the provided encoding."""
+    if uri_component is None:
+        return uri_component
+
+    # Try to see if the component we're encoding is already percent-encoded
+    # so we can skip all '%' characters but still encode all others.
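+    # For example, u'a%20b' contains one '%XX' sequence and exactly one '%'
+    # byte, so it is treated as already percent-encoded and passes through
+    # unchanged, while the space in u'a b' is newly encoded as '%20'.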
+ percent_encodings = len(PERCENT_MATCHER.findall( + compat.to_str(uri_component, encoding))) + + uri_bytes = compat.to_bytes(uri_component, encoding) + is_percent_encoded = percent_encodings == uri_bytes.count(b'%') + + encoded_uri = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i:i+1] + byte_ord = ord(byte) + if ((is_percent_encoded and byte == b'%') + or (byte_ord < 128 and byte.decode() in misc.NON_PCT_ENCODED)): + encoded_uri.extend(byte) + continue + encoded_uri.extend('%{0:02x}'.format(byte_ord).encode().upper()) + + return encoded_uri.decode(encoding) diff --git a/pipenv/vendor/urllib3/packages/rfc3986/parseresult.py b/pipenv/vendor/urllib3/packages/rfc3986/parseresult.py new file mode 100644 index 0000000000..0a73456693 --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/parseresult.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2015 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module containing the urlparse compatibility logic.""" +from collections import namedtuple + +from . import compat +from . import exceptions +from . import misc +from . import normalizers +from . import uri + +__all__ = ('ParseResult', 'ParseResultBytes') + +PARSED_COMPONENTS = ('scheme', 'userinfo', 'host', 'port', 'path', 'query', + 'fragment') + + +class ParseResultMixin(object): + def _generate_authority(self, attributes): + # I swear I did not align the comparisons below. That's just how they + # happened to align based on pep8 and attribute lengths. + userinfo, host, port = (attributes[p] + for p in ('userinfo', 'host', 'port')) + if (self.userinfo != userinfo or + self.host != host or + self.port != port): + if port: + port = '{0}'.format(port) + return normalizers.normalize_authority( + (compat.to_str(userinfo, self.encoding), + compat.to_str(host, self.encoding), + port) + ) + return self.authority + + def geturl(self): + """Shim to match the standard library method.""" + return self.unsplit() + + @property + def hostname(self): + """Shim to match the standard library.""" + return self.host + + @property + def netloc(self): + """Shim to match the standard library.""" + return self.authority + + @property + def params(self): + """Shim to match the standard library.""" + return self.query + + +class ParseResult(namedtuple('ParseResult', PARSED_COMPONENTS), + ParseResultMixin): + """Implementation of urlparse compatibility class. + + This uses the URIReference logic to handle compatibility with the + urlparse.ParseResult class. 
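+
+    For example, ``ParseResult.from_string('https://example.com:8080/path')``
+    exposes ``scheme='https'``, ``host='example.com'``, ``port=8080`` (as an
+    ``int``) and ``path='/path'``, along with stdlib-style shims such as
+    ``netloc`` and ``geturl()``.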
+ """ + + slots = () + + def __new__(cls, scheme, userinfo, host, port, path, query, fragment, + uri_ref, encoding='utf-8'): + """Create a new ParseResult.""" + parse_result = super(ParseResult, cls).__new__( + cls, + scheme or None, + userinfo or None, + host, + port or None, + path or None, + query, + fragment) + parse_result.encoding = encoding + parse_result.reference = uri_ref + return parse_result + + @classmethod + def from_parts(cls, scheme=None, userinfo=None, host=None, port=None, + path=None, query=None, fragment=None, encoding='utf-8'): + """Create a ParseResult instance from its parts.""" + authority = '' + if userinfo is not None: + authority += userinfo + '@' + if host is not None: + authority += host + if port is not None: + authority += ':{0}'.format(port) + uri_ref = uri.URIReference(scheme=scheme, + authority=authority, + path=path, + query=query, + fragment=fragment, + encoding=encoding).normalize() + userinfo, host, port = authority_from(uri_ref, strict=True) + return cls(scheme=uri_ref.scheme, + userinfo=userinfo, + host=host, + port=port, + path=uri_ref.path, + query=uri_ref.query, + fragment=uri_ref.fragment, + uri_ref=uri_ref, + encoding=encoding) + + @classmethod + def from_string(cls, uri_string, encoding='utf-8', strict=True, + lazy_normalize=True): + """Parse a URI from the given unicode URI string. + + :param str uri_string: Unicode URI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :param bool strict: Parse strictly according to :rfc:`3986` if True. + If False, parse similarly to the standard library's urlparse + function. + :returns: :class:`ParseResult` or subclass thereof + """ + reference = uri.URIReference.from_string(uri_string, encoding) + if not lazy_normalize: + reference = reference.normalize() + userinfo, host, port = authority_from(reference, strict) + + return cls(scheme=reference.scheme, + userinfo=userinfo, + host=host, + port=port, + path=reference.path, + query=reference.query, + fragment=reference.fragment, + uri_ref=reference, + encoding=encoding) + + @property + def authority(self): + """Return the normalized authority.""" + return self.reference.authority + + def copy_with(self, scheme=misc.UseExisting, userinfo=misc.UseExisting, + host=misc.UseExisting, port=misc.UseExisting, + path=misc.UseExisting, query=misc.UseExisting, + fragment=misc.UseExisting): + """Create a copy of this instance replacing with specified parts.""" + attributes = zip(PARSED_COMPONENTS, + (scheme, userinfo, host, port, path, query, fragment)) + attrs_dict = {} + for name, value in attributes: + if value is misc.UseExisting: + value = getattr(self, name) + attrs_dict[name] = value + authority = self._generate_authority(attrs_dict) + ref = self.reference.copy_with(scheme=attrs_dict['scheme'], + authority=authority, + path=attrs_dict['path'], + query=attrs_dict['query'], + fragment=attrs_dict['fragment']) + return ParseResult(uri_ref=ref, encoding=self.encoding, **attrs_dict) + + def encode(self, encoding=None): + """Convert to an instance of ParseResultBytes.""" + encoding = encoding or self.encoding + attrs = dict( + zip(PARSED_COMPONENTS, + (attr.encode(encoding) if hasattr(attr, 'encode') else attr + for attr in self))) + return ParseResultBytes( + uri_ref=self.reference, + encoding=encoding, + **attrs + ) + + def unsplit(self, use_idna=False): + """Create a URI string from the components. + + :returns: The parsed URI reconstituted as a string. 
+ :rtype: str + """ + parse_result = self + if use_idna and self.host: + hostbytes = self.host.encode('idna') + host = hostbytes.decode(self.encoding) + parse_result = self.copy_with(host=host) + return parse_result.reference.unsplit() + + +class ParseResultBytes(namedtuple('ParseResultBytes', PARSED_COMPONENTS), + ParseResultMixin): + """Compatibility shim for the urlparse.ParseResultBytes object.""" + + def __new__(cls, scheme, userinfo, host, port, path, query, fragment, + uri_ref, encoding='utf-8', lazy_normalize=True): + """Create a new ParseResultBytes instance.""" + parse_result = super(ParseResultBytes, cls).__new__( + cls, + scheme or None, + userinfo or None, + host, + port or None, + path or None, + query or None, + fragment or None) + parse_result.encoding = encoding + parse_result.reference = uri_ref + parse_result.lazy_normalize = lazy_normalize + return parse_result + + @classmethod + def from_parts(cls, scheme=None, userinfo=None, host=None, port=None, + path=None, query=None, fragment=None, encoding='utf-8', + lazy_normalize=True): + """Create a ParseResult instance from its parts.""" + authority = '' + if userinfo is not None: + authority += userinfo + '@' + if host is not None: + authority += host + if port is not None: + authority += ':{0}'.format(int(port)) + uri_ref = uri.URIReference(scheme=scheme, + authority=authority, + path=path, + query=query, + fragment=fragment, + encoding=encoding) + if not lazy_normalize: + uri_ref = uri_ref.normalize() + to_bytes = compat.to_bytes + userinfo, host, port = authority_from(uri_ref, strict=True) + return cls(scheme=to_bytes(scheme, encoding), + userinfo=to_bytes(userinfo, encoding), + host=to_bytes(host, encoding), + port=port, + path=to_bytes(path, encoding), + query=to_bytes(query, encoding), + fragment=to_bytes(fragment, encoding), + uri_ref=uri_ref, + encoding=encoding, + lazy_normalize=lazy_normalize) + + @classmethod + def from_string(cls, uri_string, encoding='utf-8', strict=True, + lazy_normalize=True): + """Parse a URI from the given unicode URI string. + + :param str uri_string: Unicode URI to be parsed into a reference. + :param str encoding: The encoding of the string provided + :param bool strict: Parse strictly according to :rfc:`3986` if True. + If False, parse similarly to the standard library's urlparse + function. 
+ :returns: :class:`ParseResultBytes` or subclass thereof + """ + reference = uri.URIReference.from_string(uri_string, encoding) + if not lazy_normalize: + reference = reference.normalize() + userinfo, host, port = authority_from(reference, strict) + + to_bytes = compat.to_bytes + return cls(scheme=to_bytes(reference.scheme, encoding), + userinfo=to_bytes(userinfo, encoding), + host=to_bytes(host, encoding), + port=port, + path=to_bytes(reference.path, encoding), + query=to_bytes(reference.query, encoding), + fragment=to_bytes(reference.fragment, encoding), + uri_ref=reference, + encoding=encoding, + lazy_normalize=lazy_normalize) + + @property + def authority(self): + """Return the normalized authority.""" + return self.reference.authority.encode(self.encoding) + + def copy_with(self, scheme=misc.UseExisting, userinfo=misc.UseExisting, + host=misc.UseExisting, port=misc.UseExisting, + path=misc.UseExisting, query=misc.UseExisting, + fragment=misc.UseExisting, lazy_normalize=True): + """Create a copy of this instance replacing with specified parts.""" + attributes = zip(PARSED_COMPONENTS, + (scheme, userinfo, host, port, path, query, fragment)) + attrs_dict = {} + for name, value in attributes: + if value is misc.UseExisting: + value = getattr(self, name) + if not isinstance(value, bytes) and hasattr(value, 'encode'): + value = value.encode(self.encoding) + attrs_dict[name] = value + authority = self._generate_authority(attrs_dict) + to_str = compat.to_str + ref = self.reference.copy_with( + scheme=to_str(attrs_dict['scheme'], self.encoding), + authority=to_str(authority, self.encoding), + path=to_str(attrs_dict['path'], self.encoding), + query=to_str(attrs_dict['query'], self.encoding), + fragment=to_str(attrs_dict['fragment'], self.encoding) + ) + if not lazy_normalize: + ref = ref.normalize() + return ParseResultBytes( + uri_ref=ref, + encoding=self.encoding, + lazy_normalize=lazy_normalize, + **attrs_dict + ) + + def unsplit(self, use_idna=False): + """Create a URI bytes object from the components. + + :returns: The parsed URI reconstituted as a string. 
+ :rtype: bytes + """ + parse_result = self + if use_idna and self.host: + # self.host is bytes, to encode to idna, we need to decode it + # first + host = self.host.decode(self.encoding) + hostbytes = host.encode('idna') + parse_result = self.copy_with(host=hostbytes) + if self.lazy_normalize: + parse_result = parse_result.copy_with(lazy_normalize=False) + uri = parse_result.reference.unsplit() + return uri.encode(self.encoding) + + +def split_authority(authority): + # Initialize our expected return values + userinfo = host = port = None + # Initialize an extra var we may need to use + extra_host = None + # Set-up rest in case there is no userinfo portion + rest = authority + + if '@' in authority: + userinfo, rest = authority.rsplit('@', 1) + + # Handle IPv6 host addresses + if rest.startswith('['): + host, rest = rest.split(']', 1) + host += ']' + + if ':' in rest: + extra_host, port = rest.split(':', 1) + elif not host and rest: + host = rest + + if extra_host and not host: + host = extra_host + + return userinfo, host, port + + +def authority_from(reference, strict): + try: + subauthority = reference.authority_info() + except exceptions.InvalidAuthority: + if strict: + raise + userinfo, host, port = split_authority(reference.authority) + else: + # Thanks to Richard Barrell for this idea: + # https://twitter.com/0x2ba22e11/status/617338811975139328 + userinfo, host, port = (subauthority.get(p) + for p in ('userinfo', 'host', 'port')) + + if port: + try: + port = int(port) + except ValueError: + raise exceptions.InvalidPort(port) + return userinfo, host, port diff --git a/pipenv/vendor/urllib3/packages/rfc3986/uri.py b/pipenv/vendor/urllib3/packages/rfc3986/uri.py new file mode 100644 index 0000000000..d1d71505e2 --- /dev/null +++ b/pipenv/vendor/urllib3/packages/rfc3986/uri.py @@ -0,0 +1,153 @@ +"""Module containing the implementation of the URIReference class.""" +# -*- coding: utf-8 -*- +# Copyright (c) 2014 Rackspace +# Copyright (c) 2015 Ian Stapleton Cordasco +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from collections import namedtuple + +from . import compat +from . import misc +from . import normalizers +from ._mixin import URIMixin + + +class URIReference(namedtuple('URIReference', misc.URI_COMPONENTS), URIMixin): + """Immutable object representing a parsed URI Reference. + + .. note:: + + This class is not intended to be directly instantiated by the user. + + This object exposes attributes for the following components of a + URI: + + - scheme + - authority + - path + - query + - fragment + + .. attribute:: scheme + + The scheme that was parsed for the URI Reference. For example, + ``http``, ``https``, ``smtp``, ``imap``, etc. + + .. attribute:: authority + + Component of the URI that contains the user information, host, + and port sub-components. For example, + ``google.com``, ``127.0.0.1:5000``, ``username@[::1]``, + ``username:password@example.com:443``, etc. + + .. attribute:: path + + The path that was parsed for the given URI Reference. 
For example,
+        ``/``, ``/index.php``, etc.
+
+    .. attribute:: query
+
+        The query component for a given URI Reference. For example, ``a=b``,
+        ``a=b%20c``, ``a=b+c``, ``a=b,c=d,e=%20f``, etc.
+
+    .. attribute:: fragment
+
+        The fragment component of a URI. For example, ``section-3.1``.
+
+    This class also provides extra attributes for easier access to
+    information like the subcomponents of the authority component.
+
+    .. attribute:: userinfo
+
+        The user information parsed from the authority.
+
+    .. attribute:: host
+
+        The hostname, IPv4, or IPv6 address parsed from the authority.
+
+    .. attribute:: port
+
+        The port parsed from the authority.
+    """
+
+    slots = ()
+
+    def __new__(cls, scheme, authority, path, query, fragment,
+                encoding='utf-8'):
+        """Create a new URIReference."""
+        ref = super(URIReference, cls).__new__(
+            cls,
+            scheme or None,
+            authority or None,
+            path or None,
+            query,
+            fragment)
+        ref.encoding = encoding
+        return ref
+
+    __hash__ = tuple.__hash__
+
+    def __eq__(self, other):
+        """Compare this reference to another."""
+        other_ref = other
+        if isinstance(other, tuple):
+            other_ref = URIReference(*other)
+        elif not isinstance(other, URIReference):
+            try:
+                other_ref = URIReference.from_string(other)
+            except TypeError:
+                raise TypeError(
+                    'Unable to compare URIReference() to {0}()'.format(
+                        type(other).__name__))
+
+        # See http://tools.ietf.org/html/rfc3986#section-6.2
+        naive_equality = tuple(self) == tuple(other_ref)
+        return naive_equality or self.normalized_equality(other_ref)
+
+    def normalize(self):
+        """Normalize this reference as described in Section 6.2.2.
+
+        This is not an in-place normalization. Instead this creates a new
+        URIReference.
+
+        :returns: A new reference object with normalized components.
+        :rtype: URIReference
+        """
+        # See http://tools.ietf.org/html/rfc3986#section-6.2.2 for logic in
+        # this method.
+        return URIReference(normalizers.normalize_scheme(self.scheme or ''),
+                            normalizers.normalize_authority(
+                                (self.userinfo, self.host, self.port)),
+                            normalizers.normalize_path(self.path or ''),
+                            normalizers.normalize_query(self.query),
+                            normalizers.normalize_fragment(self.fragment),
+                            self.encoding)
+
+    @classmethod
+    def from_string(cls, uri_string, encoding='utf-8'):
+        """Parse a URI reference from the given unicode URI string.
+
+        :param str uri_string: Unicode URI to be parsed into a reference.
+        :param str encoding: The encoding of the string provided
+        :returns: :class:`URIReference` or subclass thereof
+        """
+        uri_string = compat.to_str(uri_string, encoding)
+
+        split_uri = misc.URI_MATCHER.match(uri_string).groupdict()
+        return cls(
+            split_uri['scheme'], split_uri['authority'],
+            normalizers.encode_component(split_uri['path'], encoding),
+            normalizers.encode_component(split_uri['query'], encoding),
+            normalizers.encode_component(split_uri['fragment'], encoding),
+            encoding,
+        )
diff --git a/pipenv/vendor/urllib3/packages/rfc3986/validators.py b/pipenv/vendor/urllib3/packages/rfc3986/validators.py
new file mode 100644
index 0000000000..7fc97215b1
--- /dev/null
+++ b/pipenv/vendor/urllib3/packages/rfc3986/validators.py
@@ -0,0 +1,450 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Ian Stapleton Cordasco
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module containing the validation logic for rfc3986."""
+from . import exceptions
+from . import misc
+from . import normalizers
+
+
+class Validator(object):
+    """Object used to configure validation of all objects in rfc3986.
+
+    .. versionadded:: 1.0
+
+    Example usage::
+
+        >>> from rfc3986 import api, validators
+        >>> uri = api.uri_reference('https://github.com/')
+        >>> validator = validators.Validator().require_presence_of(
+        ...    'scheme', 'host', 'path',
+        ... ).allow_schemes(
+        ...    'http', 'https',
+        ... ).allow_hosts(
+        ...    '127.0.0.1', 'github.com',
+        ... )
+        >>> validator.validate(uri)
+        >>> invalid_uri = api.uri_reference('imap://mail.google.com')
+        >>> validator.validate(invalid_uri)
+        Traceback (most recent call last):
+        ...
+        rfc3986.exceptions.MissingComponentError: ('path was required but
+        missing', URIReference(scheme=u'imap', authority=u'mail.google.com',
+        path=None, query=None, fragment=None), ['path'])
+
+    """
+
+    COMPONENT_NAMES = frozenset([
+        'scheme',
+        'userinfo',
+        'host',
+        'port',
+        'path',
+        'query',
+        'fragment',
+    ])
+
+    def __init__(self):
+        """Initialize our default validations."""
+        self.allowed_schemes = set()
+        self.allowed_hosts = set()
+        self.allowed_ports = set()
+        self.allow_password = True
+        self.required_components = {
+            'scheme': False,
+            'userinfo': False,
+            'host': False,
+            'port': False,
+            'path': False,
+            'query': False,
+            'fragment': False,
+        }
+        self.validated_components = self.required_components.copy()
+
+    def allow_schemes(self, *schemes):
+        """Require the scheme to be one of the provided schemes.
+
+        .. versionadded:: 1.0
+
+        :param schemes:
+            Schemes, without ``://`` that are allowed.
+        :returns:
+            The validator instance.
+        :rtype:
+            Validator
+        """
+        for scheme in schemes:
+            self.allowed_schemes.add(normalizers.normalize_scheme(scheme))
+        return self
+
+    def allow_hosts(self, *hosts):
+        """Require the host to be one of the provided hosts.
+
+        .. versionadded:: 1.0
+
+        :param hosts:
+            Hosts that are allowed.
+        :returns:
+            The validator instance.
+        :rtype:
+            Validator
+        """
+        for host in hosts:
+            self.allowed_hosts.add(normalizers.normalize_host(host))
+        return self
+
+    def allow_ports(self, *ports):
+        """Require the port to be one of the provided ports.
+
+        .. versionadded:: 1.0
+
+        :param ports:
+            Ports that are allowed.
+        :returns:
+            The validator instance.
+        :rtype:
+            Validator
+        """
+        for port in ports:
+            port_int = int(port, base=10)
+            if 0 <= port_int <= 65535:
+                self.allowed_ports.add(port)
+        return self
+
+    def allow_use_of_password(self):
+        """Allow passwords to be present in the URI.
+
+        .. versionadded:: 1.0
+
+        :returns:
+            The validator instance.
+        :rtype:
+            Validator
+        """
+        self.allow_password = True
+        return self
+
+    def forbid_use_of_password(self):
+        """Prevent passwords from being included in the URI.
+
+        .. versionadded:: 1.0
+
+        :returns:
+            The validator instance.
+        :rtype:
+            Validator
+        """
+        self.allow_password = False
+        return self
+
+    def check_validity_of(self, *components):
+        """Check the validity of the components provided.
+
+        This can be specified repeatedly.
+
+        .. 
versionadded:: 1.1 + + :param components: + Names of components from :attr:`Validator.COMPONENT_NAMES`. + :returns: + The validator instance. + :rtype: + Validator + """ + components = [c.lower() for c in components] + for component in components: + if component not in self.COMPONENT_NAMES: + raise ValueError( + '"{}" is not a valid component'.format(component) + ) + self.validated_components.update({ + component: True for component in components + }) + return self + + def require_presence_of(self, *components): + """Require the components provided. + + This can be specified repeatedly. + + .. versionadded:: 1.0 + + :param components: + Names of components from :attr:`Validator.COMPONENT_NAMES`. + :returns: + The validator instance. + :rtype: + Validator + """ + components = [c.lower() for c in components] + for component in components: + if component not in self.COMPONENT_NAMES: + raise ValueError( + '"{}" is not a valid component'.format(component) + ) + self.required_components.update({ + component: True for component in components + }) + return self + + def validate(self, uri): + """Check a URI for conditions specified on this validator. + + .. versionadded:: 1.0 + + :param uri: + Parsed URI to validate. + :type uri: + rfc3986.uri.URIReference + :raises MissingComponentError: + When a required component is missing. + :raises UnpermittedComponentError: + When a component is not one of those allowed. + :raises PasswordForbidden: + When a password is present in the userinfo component but is + not permitted by configuration. + :raises InvalidComponentsError: + When a component was found to be invalid. + """ + if not self.allow_password: + check_password(uri) + + required_components = [ + component + for component, required in self.required_components.items() + if required + ] + validated_components = [ + component + for component, required in self.validated_components.items() + if required + ] + if required_components: + ensure_required_components_exist(uri, required_components) + if validated_components: + ensure_components_are_valid(uri, validated_components) + + ensure_one_of(self.allowed_schemes, uri, 'scheme') + ensure_one_of(self.allowed_hosts, uri, 'host') + ensure_one_of(self.allowed_ports, uri, 'port') + + +def check_password(uri): + """Assert that there is no password present in the uri.""" + userinfo = uri.userinfo + if not userinfo: + return + credentials = userinfo.split(':', 1) + if len(credentials) <= 1: + return + raise exceptions.PasswordForbidden(uri) + + +def ensure_one_of(allowed_values, uri, attribute): + """Assert that the uri's attribute is one of the allowed values.""" + value = getattr(uri, attribute) + if value is not None and allowed_values and value not in allowed_values: + raise exceptions.UnpermittedComponentError( + attribute, value, allowed_values, + ) + + +def ensure_required_components_exist(uri, required_components): + """Assert that all required components are present in the URI.""" + missing_components = sorted([ + component + for component in required_components + if getattr(uri, component) is None + ]) + if missing_components: + raise exceptions.MissingComponentError(uri, *missing_components) + + +def is_valid(value, matcher, require): + """Determine if a value is valid based on the provided matcher. + + :param str value: + Value to validate. + :param matcher: + Compiled regular expression to use to validate the value. + :param require: + Whether or not the value is required. 
+ """ + if require: + return (value is not None + and matcher.match(value)) + + # require is False and value is not None + return value is None or matcher.match(value) + + +def authority_is_valid(authority, host=None, require=False): + """Determine if the authority string is valid. + + :param str authority: + The authority to validate. + :param str host: + (optional) The host portion of the authority to validate. + :param bool require: + (optional) Specify if authority must not be None. + :returns: + ``True`` if valid, ``False`` otherwise + :rtype: + bool + """ + validated = is_valid(authority, misc.SUBAUTHORITY_MATCHER, require) + if validated and host is not None: + return host_is_valid(host, require) + return validated + + +def host_is_valid(host, require=False): + """Determine if the host string is valid. + + :param str host: + The host to validate. + :param bool require: + (optional) Specify if host must not be None. + :returns: + ``True`` if valid, ``False`` otherwise + :rtype: + bool + """ + validated = is_valid(host, misc.HOST_MATCHER, require) + if validated and host is not None and misc.IPv4_MATCHER.match(host): + return valid_ipv4_host_address(host) + elif validated and host is not None and misc.IPv6_MATCHER.match(host): + return misc.IPv6_NO_RFC4007_MATCHER.match(host) is not None + return validated + + +def scheme_is_valid(scheme, require=False): + """Determine if the scheme is valid. + + :param str scheme: + The scheme string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a scheme. + :returns: + ``True`` if the scheme is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(scheme, misc.SCHEME_MATCHER, require) + + +def path_is_valid(path, require=False): + """Determine if the path component is valid. + + :param str path: + The path string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a path. + :returns: + ``True`` if the path is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(path, misc.PATH_MATCHER, require) + + +def query_is_valid(query, require=False): + """Determine if the query component is valid. + + :param str query: + The query string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a query. + :returns: + ``True`` if the query is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(query, misc.QUERY_MATCHER, require) + + +def fragment_is_valid(fragment, require=False): + """Determine if the fragment component is valid. + + :param str fragment: + The fragment string to validate. + :param bool require: + (optional) Set to ``True`` to require the presence of a fragment. + :returns: + ``True`` if the fragment is valid. ``False`` otherwise. + :rtype: + bool + """ + return is_valid(fragment, misc.FRAGMENT_MATCHER, require) + + +def valid_ipv4_host_address(host): + """Determine if the given host is a valid IPv4 address.""" + # If the host exists, and it might be IPv4, check each byte in the + # address. 
+ return all([0 <= int(byte, base=10) <= 255 for byte in host.split('.')]) + + +_COMPONENT_VALIDATORS = { + 'scheme': scheme_is_valid, + 'path': path_is_valid, + 'query': query_is_valid, + 'fragment': fragment_is_valid, +} + +_SUBAUTHORITY_VALIDATORS = set(['userinfo', 'host', 'port']) + + +def subauthority_component_is_valid(uri, component): + """Determine if the userinfo, host, and port are valid.""" + try: + subauthority_dict = uri.authority_info() + except exceptions.InvalidAuthority: + return False + + # If we can parse the authority into sub-components and we're not + # validating the port, we can assume it's valid. + if component == 'host': + return host_is_valid(subauthority_dict['host']) + elif component != 'port': + return True + + try: + port = int(subauthority_dict['port']) + except TypeError: + # If the port wasn't provided it'll be None and int(None) raises a + # TypeError + return True + + return (0 <= port <= 65535) + + +def ensure_components_are_valid(uri, validated_components): + """Assert that all components are valid in the URI.""" + invalid_components = set([]) + for component in validated_components: + if component in _SUBAUTHORITY_VALIDATORS: + if not subauthority_component_is_valid(uri, component): + invalid_components.add(component) + # Python's peephole optimizer means that while this continue *is* + # actually executed, coverage.py cannot detect that. See also, + # https://bitbucket.org/ned/coveragepy/issues/198/continue-marked-as-not-covered + continue # nocov: Python 2.7, 3.3, 3.4 + + validator = _COMPONENT_VALIDATORS[component] + if not validator(getattr(uri, component)): + invalid_components.add(component) + + if invalid_components: + raise exceptions.InvalidComponentsError(uri, *invalid_components) diff --git a/pipenv/vendor/urllib3/poolmanager.py b/pipenv/vendor/urllib3/poolmanager.py index fe5491cfda..a6ade6e905 100644 --- a/pipenv/vendor/urllib3/poolmanager.py +++ b/pipenv/vendor/urllib3/poolmanager.py @@ -7,6 +7,7 @@ from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown +from .packages import six from .packages.six.moves.urllib.parse import urljoin from .request import RequestMethods from .util.url import parse_url @@ -19,7 +20,8 @@ log = logging.getLogger(__name__) SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', - 'ssl_version', 'ca_cert_dir', 'ssl_context') + 'ssl_version', 'ca_cert_dir', 'ssl_context', + 'key_password') # All known keyword arguments that could be provided to the pool manager, its # pools, or the underlying connections. This is used to construct a pool key. @@ -33,6 +35,7 @@ 'key_block', # bool 'key_source_address', # str 'key_key_file', # str + 'key_key_password', # str 'key_cert_file', # str 'key_cert_reqs', # str 'key_ca_certs', # str @@ -47,7 +50,7 @@ 'key__socks_options', # dict 'key_assert_hostname', # bool or string 'key_assert_fingerprint', # str - 'key_server_hostname', #str + 'key_server_hostname', # str ) #: The namedtuple class used to construct keys for the connection pool. @@ -342,8 +345,10 @@ def urlopen(self, method, url, redirect=True, **kw): # conn.is_same_host() which may use socket.gethostbyname() in the future. 
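+            # Editorial note (not in upstream urllib3): matching is now
+            # case-insensitive, so Retry(remove_headers_on_redirect=["Authorization"])
+            # also strips a lowercase "authorization" header when redirecting
+            # to a different host.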
if (retries.remove_headers_on_redirect and not conn.is_same_host(redirect_location)): - for header in retries.remove_headers_on_redirect: - kw['headers'].pop(header, None) + headers = list(six.iterkeys(kw['headers'])) + for header in headers: + if header.lower() in retries.remove_headers_on_redirect: + kw['headers'].pop(header, None) try: retries = retries.increment(method, url, response=response, _pool=conn) diff --git a/pipenv/vendor/urllib3/response.py b/pipenv/vendor/urllib3/response.py index c112690b0a..4f857932c5 100644 --- a/pipenv/vendor/urllib3/response.py +++ b/pipenv/vendor/urllib3/response.py @@ -6,6 +6,11 @@ from socket import timeout as SocketTimeout from socket import error as SocketError +try: + import brotli +except ImportError: + brotli = None + from ._collections import HTTPHeaderDict from .exceptions import ( BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, @@ -90,6 +95,25 @@ def decompress(self, data): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) +if brotli is not None: + class BrotliDecoder(object): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self): + self._obj = brotli.Decompressor() + + def decompress(self, data): + if hasattr(self._obj, 'decompress'): + return self._obj.decompress(data) + return self._obj.process(data) + + def flush(self): + if hasattr(self._obj, 'flush'): + return self._obj.flush() + return b'' + + class MultiDecoder(object): """ From RFC7231: @@ -118,6 +142,9 @@ def _get_decoder(mode): if mode == 'gzip': return GzipDecoder() + if brotli is not None and mode == 'br': + return BrotliDecoder() + return DeflateDecoder() @@ -155,6 +182,8 @@ class is also compatible with the Python standard library's :mod:`io` """ CONTENT_DECODERS = ['gzip', 'deflate'] + if brotli is not None: + CONTENT_DECODERS += ['br'] REDIRECT_STATUSES = [301, 302, 303, 307, 308] def __init__(self, body='', headers=None, status=0, version=0, reason=None, @@ -311,24 +340,32 @@ def _init_decoder(self): if content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) elif ',' in content_encoding: - encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS] + encodings = [ + e.strip() for e in content_encoding.split(',') + if e.strip() in self.CONTENT_DECODERS] if len(encodings): self._decoder = _get_decoder(content_encoding) + DECODER_ERROR_CLASSES = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + def _decode(self, data, decode_content, flush_decoder): """ Decode the data passed in and potentially flush the decoder. """ + if not decode_content: + return data + try: - if decode_content and self._decoder: + if self._decoder: data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: + except self.DECODER_ERROR_CLASSES as e: content_encoding = self.headers.get('content-encoding', '').lower() raise DecodeError( "Received response with content-encoding: %s, but " "failed to decode it." 
% content_encoding, e) - - if flush_decoder and decode_content: + if flush_decoder: data += self._flush_decoder() return data @@ -508,9 +545,10 @@ def from_httplib(ResponseCls, r, **response_kw): headers = r.msg if not isinstance(headers, HTTPHeaderDict): - if PY3: # Python 3 + if PY3: headers = HTTPHeaderDict(headers.items()) - else: # Python 2 + else: + # Python 2.7 headers = HTTPHeaderDict.from_httplib(headers) # HTTPResponse objects in Python 3 don't have a .strict attribute @@ -703,3 +741,20 @@ def geturl(self): return self.retries.history[-1].redirect_location else: return self._request_url + + def __iter__(self): + buffer = [b""] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunk = chunk.split(b"\n") + yield b"".join(buffer) + chunk[0] + b"\n" + for x in chunk[1:-1]: + yield x + b"\n" + if chunk[-1]: + buffer = [chunk[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/pipenv/vendor/urllib3/util/__init__.py b/pipenv/vendor/urllib3/util/__init__.py index 2f2770b622..2914bb468b 100644 --- a/pipenv/vendor/urllib3/util/__init__.py +++ b/pipenv/vendor/urllib3/util/__init__.py @@ -12,6 +12,7 @@ resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, + PROTOCOL_TLS, ) from .timeout import ( current_time, @@ -35,6 +36,7 @@ 'IS_PYOPENSSL', 'IS_SECURETRANSPORT', 'SSLContext', + 'PROTOCOL_TLS', 'Retry', 'Timeout', 'Url', diff --git a/pipenv/vendor/urllib3/util/request.py b/pipenv/vendor/urllib3/util/request.py index 3ddfcd5594..280b8530c6 100644 --- a/pipenv/vendor/urllib3/util/request.py +++ b/pipenv/vendor/urllib3/util/request.py @@ -5,6 +5,13 @@ from ..exceptions import UnrewindableBodyError ACCEPT_ENCODING = 'gzip,deflate' +try: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ',br' + _FAILEDTELL = object() diff --git a/pipenv/vendor/urllib3/util/retry.py b/pipenv/vendor/urllib3/util/retry.py index e7d0abd610..02429ee8e4 100644 --- a/pipenv/vendor/urllib3/util/retry.py +++ b/pipenv/vendor/urllib3/util/retry.py @@ -179,7 +179,8 @@ def __init__(self, total=10, connect=None, read=None, redirect=None, status=None self.raise_on_status = raise_on_status self.history = history or tuple() self.respect_retry_after_header = respect_retry_after_header - self.remove_headers_on_redirect = remove_headers_on_redirect + self.remove_headers_on_redirect = frozenset([ + h.lower() for h in remove_headers_on_redirect]) def new(self, **kw): params = dict( diff --git a/pipenv/vendor/urllib3/util/ssl_.py b/pipenv/vendor/urllib3/util/ssl_.py index 64ea192a85..f271ce9301 100644 --- a/pipenv/vendor/urllib3/util/ssl_.py +++ b/pipenv/vendor/urllib3/util/ssl_.py @@ -2,13 +2,14 @@ import errno import warnings import hmac -import socket +import re from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning from ..packages import six +from ..packages.rfc3986 import abnf_regexp SSLContext = None @@ -40,14 +41,33 @@ def _const_compare_digest_backport(a, b): _const_compare_digest = getattr(hmac, 'compare_digest', _const_compare_digest_backport) +# Borrow rfc3986's regular expressions for IPv4 +# and IPv6 addresses for use in is_ipaddress() +_IP_ADDRESS_REGEX = re.compile( + r'^(?:%s|%s|%s)$' % ( + abnf_regexp.IPv4_RE, + abnf_regexp.IPv6_RE, + abnf_regexp.IPv6_ADDRZ_RFC4007_RE + ) +) try: # Test for SSL features import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from 
ssl import wrap_socket, CERT_REQUIRED from ssl import HAS_SNI # Has SNI? except ImportError: pass +try: # Platform-specific: Python 3.6 + from ssl import PROTOCOL_TLS + PROTOCOL_SSLv23 = PROTOCOL_TLS +except ImportError: + try: + from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS + PROTOCOL_SSLv23 = PROTOCOL_TLS + except ImportError: + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 + try: from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION @@ -56,25 +76,6 @@ def _const_compare_digest_backport(a, b): OP_NO_COMPRESSION = 0x20000 -# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in -# those cases. This means that we can only detect IPv4 addresses in this case. -if hasattr(socket, 'inet_pton'): - inet_pton = socket.inet_pton -else: - # Maybe we can use ipaddress if the user has urllib3[secure]? - try: - import ipaddress - - def inet_pton(_, host): - if isinstance(host, bytes): - host = host.decode('ascii') - return ipaddress.ip_address(host) - - except ImportError: # Platform-specific: Non-Linux - def inet_pton(_, host): - return socket.inet_aton(host) - - # A secure default. # Sources for more information on TLS ciphers: # @@ -83,37 +84,35 @@ def inet_pton(_, host): # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ # # The general intent is: -# - Prefer TLS 1.3 cipher suites # - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), # - prefer ECDHE over DHE for better performance, # - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and # security, # - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. +# - disable NULL authentication, MD5 MACs, DSS, and other +# insecure ciphers for security reasons. +# - NOTE: TLS 1.3 cipher suites are managed through a different interface +# not exposed by CPython (yet!) and are enabled by default if they're available. DEFAULT_CIPHERS = ':'.join([ - 'TLS13-AES-256-GCM-SHA384', - 'TLS13-CHACHA20-POLY1305-SHA256', - 'TLS13-AES-128-GCM-SHA256', + 'ECDHE+AESGCM', + 'ECDHE+CHACHA20', + 'DHE+AESGCM', + 'DHE+CHACHA20', 'ECDH+AESGCM', - 'ECDH+CHACHA20', 'DH+AESGCM', - 'DH+CHACHA20', - 'ECDH+AES256', - 'DH+AES256', - 'ECDH+AES128', + 'ECDH+AES', 'DH+AES', 'RSA+AESGCM', 'RSA+AES', '!aNULL', '!eNULL', '!MD5', + '!DSS', ]) try: from ssl import SSLContext # Modern SSL? except ImportError: - import sys - class SSLContext(object): # Platform-specific: Python 2 def __init__(self, protocol_version): self.protocol = protocol_version @@ -199,7 +198,7 @@ def resolve_cert_reqs(candidate): constant which can directly be passed to wrap_socket. 
""" if candidate is None: - return CERT_NONE + return CERT_REQUIRED if isinstance(candidate, str): res = getattr(ssl, candidate, None) @@ -215,7 +214,7 @@ def resolve_ssl_version(candidate): like resolve_cert_reqs """ if candidate is None: - return PROTOCOL_SSLv23 + return PROTOCOL_TLS if isinstance(candidate, str): res = getattr(ssl, candidate, None) @@ -261,7 +260,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, Constructed SSLContext object with specified options :rtype: SSLContext """ - context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + context = SSLContext(ssl_version or PROTOCOL_TLS) context.set_ciphers(ciphers or DEFAULT_CIPHERS) @@ -291,7 +290,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None, ciphers=None, ssl_context=None, - ca_cert_dir=None): + ca_cert_dir=None, key_password=None): """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. @@ -307,6 +306,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. """ context = ssl_context if context is None: @@ -327,12 +328,22 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if e.errno == errno.ENOENT: raise SSLError(e) raise - elif getattr(context, 'load_default_certs', None) is not None: + + elif ssl_context is None and hasattr(context, 'load_default_certs'): # try to load OS default certs; works well on Windows (require Python3.4+) context.load_default_certs() + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + if certfile: - context.load_cert_chain(certfile, keyfile) + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) # If we detect server_hostname is an IP address then the SNI # extension should not be used according to RFC3546 Section 3.1 @@ -358,7 +369,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, def is_ipaddress(hostname): - """Detects whether the hostname given is an IP address. + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. @@ -366,16 +378,15 @@ def is_ipaddress(hostname): if six.PY3 and isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. 
hostname = hostname.decode('ascii') + return _IP_ADDRESS_REGEX.match(hostname) is not None - families = [socket.AF_INET] - if hasattr(socket, 'AF_INET6'): - families.append(socket.AF_INET6) - for af in families: - try: - inet_pton(af, hostname) - except (socket.error, ValueError, OSError): - pass - else: - return True +def _is_key_file_encrypted(key_file): + """Detects if a key file is encrypted or not.""" + with open(key_file, 'r') as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if 'ENCRYPTED' in line: + return True + return False diff --git a/pipenv/vendor/urllib3/util/timeout.py b/pipenv/vendor/urllib3/util/timeout.py index cec817e6ef..a4d004a848 100644 --- a/pipenv/vendor/urllib3/util/timeout.py +++ b/pipenv/vendor/urllib3/util/timeout.py @@ -131,7 +131,8 @@ def _validate_timeout(cls, value, name): raise ValueError("Attempted to set %s timeout to %s, but the " "timeout cannot be set to a value less " "than or equal to 0." % (name, value)) - except TypeError: # Python 3 + except TypeError: + # Python 3 raise ValueError("Timeout value %s was %s, but it must be an " "int, float or None." % (name, value)) diff --git a/pipenv/vendor/urllib3/util/url.py b/pipenv/vendor/urllib3/util/url.py index 6b6f9968d7..0bc6ced756 100644 --- a/pipenv/vendor/urllib3/util/url.py +++ b/pipenv/vendor/urllib3/util/url.py @@ -1,7 +1,12 @@ from __future__ import absolute_import +import re from collections import namedtuple from ..exceptions import LocationParseError +from ..packages import six, rfc3986 +from ..packages.rfc3986.exceptions import RFC3986Exception, ValidationError +from ..packages.rfc3986.validators import Validator +from ..packages.rfc3986 import abnf_regexp, normalizers, compat, misc url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] @@ -10,10 +15,16 @@ # urllib3 infers URLs without a scheme (None) to be http. NORMALIZABLE_SCHEMES = ('http', 'https', None) +# Regex for detecting URLs with schemes. RFC 3986 Section 3.1 +SCHEME_REGEX = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+\-]*:|/)") + +PATH_CHARS = abnf_regexp.UNRESERVED_CHARS_SET | abnf_regexp.SUB_DELIMITERS_SET | {':', '@', '/'} +QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {'?'} + class Url(namedtuple('Url', url_attrs)): """ - Datastructure for representing an HTTP URL. Used as a return value for + Data structure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. """ @@ -23,10 +34,8 @@ def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): if path and not path.startswith('/'): path = '/' + path - if scheme: + if scheme is not None: scheme = scheme.lower() - if host and scheme in NORMALIZABLE_SCHEMES: - host = host.lower() return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) @@ -72,23 +81,23 @@ def url(self): 'http://username:password@host.com:80/path?query#fragment' """ scheme, auth, host, port, path, query, fragment = self - url = '' + url = u'' # We use "is not None" we want things to happen with empty strings (or 0 port) if scheme is not None: - url += scheme + '://' + url += scheme + u'://' if auth is not None: - url += auth + '@' + url += auth + u'@' if host is not None: url += host if port is not None: - url += ':' + str(port) + url += u':' + str(port) if path is not None: url += path if query is not None: - url += '?' + query + url += u'?' 
+ query if fragment is not None: - url += '#' + fragment + url += u'#' + fragment return url @@ -98,6 +107,8 @@ def __str__(self): def split_first(s, delims): """ + .. deprecated:: 1.25 + Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. @@ -129,10 +140,44 @@ def split_first(s, delims): return s[:min_idx], s[min_idx + 1:], min_delim +def _encode_invalid_chars(component, allowed_chars, encoding='utf-8'): + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. Based on + rfc3986.normalizers.encode_component() + """ + if component is None: + return component + + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. + percent_encodings = len(normalizers.PERCENT_MATCHER.findall( + compat.to_str(component, encoding))) + + uri_bytes = component.encode('utf-8', 'surrogatepass') + is_percent_encoded = percent_encodings == uri_bytes.count(b'%') + + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i:i+1] + byte_ord = ord(byte) + if ((is_percent_encoded and byte == b'%') + or (byte_ord < 128 and byte.decode() in allowed_chars)): + encoded_component.extend(byte) + continue + encoded_component.extend('%{0:02x}'.format(byte_ord).encode().upper()) + + return encoded_component.decode(encoding) + + def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 compliant. + + :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urlparse`. @@ -145,81 +190,95 @@ def parse_url(url): >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementations does silly things to be optimal - # on CPython. - if not url: # Empty return Url() - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. No whitespace, no plus or - # minus prefixes, no non-integer digits such as ^2 (superscript). - if not port.isdigit(): - raise LocationParseError(url) - try: - port = int(port) - except ValueError: - raise LocationParseError(url) - else: - # Blank ports are cool, too. (rfc3986#section-3.2.3) - port = None + is_string = not isinstance(url, six.binary_type) - elif not host and url: - host = url + # RFC 3986 doesn't like URLs that have a host but don't start + # with a scheme and we support URLs like that so we need to + # detect that problem and add an empty scheme indication. 
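+    # Editorial illustration: "google.com/mail" matches neither a scheme nor
+    # a leading "/", so it is rewritten to "//google.com/mail" below.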
+ # We don't get hurt on path-only URLs here as it's stripped + # off and given an empty scheme anyways. + if not SCHEME_REGEX.search(url): + url = "//" + url + def idna_encode(name): + if name and any([ord(x) > 128 for x in name]): + try: + import idna + except ImportError: + raise LocationParseError("Unable to parse URL without the 'idna' module") + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + raise LocationParseError(u"Name '%s' is not a valid IDNA label" % name) + return name + + try: + split_iri = misc.IRI_MATCHER.match(compat.to_str(url)).groupdict() + iri_ref = rfc3986.IRIReference( + split_iri['scheme'], split_iri['authority'], + _encode_invalid_chars(split_iri['path'], PATH_CHARS), + _encode_invalid_chars(split_iri['query'], QUERY_CHARS), + _encode_invalid_chars(split_iri['fragment'], FRAGMENT_CHARS) + ) + has_authority = iri_ref.authority is not None + uri_ref = iri_ref.encode(idna_encoder=idna_encode) + except (ValueError, RFC3986Exception): + return six.raise_from(LocationParseError(url), None) + + # rfc3986 strips the authority if it's invalid + if has_authority and uri_ref.authority is None: + raise LocationParseError(url) + + # Only normalize schemes we understand to not break http+unix + # or other schemes that don't follow RFC 3986. + if uri_ref.scheme is None or uri_ref.scheme.lower() in NORMALIZABLE_SCHEMES: + uri_ref = uri_ref.normalize() + + # Validate all URIReference components and ensure that all + # components that were set before are still set after + # normalization has completed. + validator = Validator() + try: + validator.check_validity_of( + *validator.COMPONENT_NAMES + ).validate(uri_ref) + except ValidationError: + return six.raise_from(LocationParseError(url), None) + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. + path = uri_ref.path if not path: - return Url(scheme, auth, host, port, path, query, fragment) - - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) - - # Query - if '?' in path: - path, query = path.split('?', 1) - - return Url(scheme, auth, host, port, path, query, fragment) + if (uri_ref.query is not None + or uri_ref.fragment is not None): + path = "" + else: + path = None + + # Ensure that each part of the URL is a `str` for + # backwards compatibility. 
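+    # Editorial illustration: bytes in, bytes out -- parse_url(b"http://example.com")
+    # hands back bytes components below, while text input hands back text.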
+ def to_input_type(x): + if x is None: + return None + elif not is_string and not isinstance(x, six.binary_type): + return x.encode('utf-8') + return x + + return Url( + scheme=to_input_type(uri_ref.scheme), + auth=to_input_type(uri_ref.userinfo), + host=to_input_type(uri_ref.host), + port=int(uri_ref.port) if uri_ref.port is not None else None, + path=to_input_type(path), + query=to_input_type(uri_ref.query), + fragment=to_input_type(uri_ref.fragment) + ) def get_host(url): diff --git a/pipenv/vendor/vistir/backports/__init__.py b/pipenv/vendor/vistir/backports/__init__.py index 03859e4f40..b5ed656229 100644 --- a/pipenv/vendor/vistir/backports/__init__.py +++ b/pipenv/vendor/vistir/backports/__init__.py @@ -2,6 +2,7 @@ from __future__ import absolute_import, unicode_literals from .functools import partialmethod +from .surrogateescape import register_surrogateescape from .tempfile import NamedTemporaryFile -__all__ = ["NamedTemporaryFile", "partialmethod"] +__all__ = ["NamedTemporaryFile", "partialmethod", "register_surrogateescape"] diff --git a/pipenv/vendor/vistir/backports/surrogateescape.py b/pipenv/vendor/vistir/backports/surrogateescape.py new file mode 100644 index 0000000000..0532be08bf --- /dev/null +++ b/pipenv/vendor/vistir/backports/surrogateescape.py @@ -0,0 +1,196 @@ +""" +This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error +handler of Python 3. +Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc +""" + +# This code is released under the Python license and the BSD 2-clause license + +import codecs +import sys + +import six + +FS_ERRORS = "surrogateescape" + +# # -- Python 2/3 compatibility ------------------------------------- +# FS_ERRORS = 'my_surrogateescape' + + +def u(text): + if six.PY3: + return text + else: + return text.decode("unicode_escape") + + +def b(data): + if six.PY3: + return data.encode("latin1") + else: + return data + + +if six.PY3: + _unichr = chr + bytes_chr = lambda code: bytes((code,)) +else: + _unichr = unichr + bytes_chr = chr + + +def surrogateescape_handler(exc): + """ + Pure Python implementation of the PEP 383: the "surrogateescape" error + handler of Python 3. Undecodable bytes will be replaced by a Unicode + character U+DCxx on decoding, and these are translated into the + original bytes on encoding. + """ + mystring = exc.object[exc.start : exc.end] + + try: + if isinstance(exc, UnicodeDecodeError): + # mystring is a byte-string in this case + decoded = replace_surrogate_decode(mystring) + elif isinstance(exc, UnicodeEncodeError): + # In the case of u'\udcc3'.encode('ascii', + # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an + # exception anyway after this function is called, even though I think + # it's doing what it should. It seems that the strict encoder is called + # to encode the unicode string that this function returns ... + decoded = replace_surrogate_encode(mystring) + else: + raise exc + except NotASurrogateError: + raise exc + return (decoded, exc.end) + + +class NotASurrogateError(Exception): + pass + + +def replace_surrogate_encode(mystring): + """ + Returns a (unicode) string, not the more logical bytes, because the codecs + register_error functionality expects this. + """ + decoded = [] + for ch in mystring: + # if utils.PY3: + # code = ch + # else: + code = ord(ch) + + # The following magic comes from Py3.3's Python/codecs.c file: + if not 0xD800 <= code <= 0xDCFF: + # Not a surrogate. Fail with the original exception. 
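+            # Editorial illustration: ordinary characters (e.g. u'a', code
+            # 0x61) take this branch, while a lone surrogate such as
+            # u'\udcff' is mapped back to the byte 0xff further below.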
+ raise NotASurrogateError + # mybytes = [0xe0 | (code >> 12), + # 0x80 | ((code >> 6) & 0x3f), + # 0x80 | (code & 0x3f)] + # Is this a good idea? + if 0xDC00 <= code <= 0xDC7F: + decoded.append(_unichr(code - 0xDC00)) + elif code <= 0xDCFF: + decoded.append(_unichr(code - 0xDC00)) + else: + raise NotASurrogateError + return str().join(decoded) + + +def replace_surrogate_decode(mybytes): + """ + Returns a (unicode) string + """ + decoded = [] + for ch in mybytes: + # We may be parsing newbytes (in which case ch is an int) or a native + # str on Py2 + if isinstance(ch, int): + code = ch + else: + code = ord(ch) + if 0x80 <= code <= 0xFF: + decoded.append(_unichr(0xDC00 + code)) + elif code <= 0x7F: + decoded.append(_unichr(code)) + else: + # # It may be a bad byte + # # Try swallowing it. + # continue + # print("RAISE!") + raise NotASurrogateError + return str().join(decoded) + + +def encodefilename(fn): + if FS_ENCODING == "ascii": + # ASCII encoder of Python 2 expects that the error handler returns a + # Unicode string encodable to ASCII, whereas our surrogateescape error + # handler has to return bytes in 0x80-0xFF range. + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if code < 128: + ch = bytes_chr(code) + elif 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + else: + raise UnicodeEncodeError( + FS_ENCODING, fn, index, index + 1, "ordinal not in range(128)" + ) + encoded.append(ch) + return bytes().join(encoded) + elif FS_ENCODING == "utf-8": + # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF + # doesn't go through our error handler + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if 0xD800 <= code <= 0xDFFF: + if 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + encoded.append(ch) + else: + raise UnicodeEncodeError( + FS_ENCODING, fn, index, index + 1, "surrogates not allowed" + ) + else: + ch_utf8 = ch.encode("utf-8") + encoded.append(ch_utf8) + return bytes().join(encoded) + else: + return fn.encode(FS_ENCODING, FS_ERRORS) + + +def decodefilename(fn): + return fn.decode(FS_ENCODING, FS_ERRORS) + + +FS_ENCODING = "ascii" +fn = b("[abc\xff]") +encoded = u("[abc\udcff]") +# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') +# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') + + +# normalize the filesystem encoding name. +# For example, we expect "utf-8", not "UTF8". 
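+# Editorial illustration: codecs.lookup('UTF8').name == 'utf-8', so alias
+# spellings such as 'UTF8' or 'utf_8' all collapse to the canonical name.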
+FS_ENCODING = codecs.lookup(FS_ENCODING).name + + +def register_surrogateescape(): + """ + Registers the surrogateescape error handler on Python 2 (only) + """ + if six.PY3: + return + try: + codecs.lookup_error(FS_ERRORS) + except LookupError: + codecs.register_error(FS_ERRORS, surrogateescape_handler) + + +if __name__ == "__main__": + pass diff --git a/pipenv/vendor/vistir/backports/tempfile.py b/pipenv/vendor/vistir/backports/tempfile.py index ce66138b5d..a3d7f3df5c 100644 --- a/pipenv/vendor/vistir/backports/tempfile.py +++ b/pipenv/vendor/vistir/backports/tempfile.py @@ -15,6 +15,24 @@ from pipenv.vendor.backports.weakref import finalize +def fs_encode(path): + try: + return os.fsencode(path) + except AttributeError: + from ..compat import fs_encode + + return fs_encode(path) + + +def fs_decode(path): + try: + return os.fsdecode(path) + except AttributeError: + from ..compat import fs_decode + + return fs_decode(path) + + __all__ = ["finalize", "NamedTemporaryFile"] @@ -48,7 +66,7 @@ def _sanitize_params(prefix, suffix, dir): if output_type is str: dir = gettempdir() else: - dir = os.fsencode(gettempdir()) + dir = fs_encode(gettempdir()) return prefix, suffix, dir, output_type diff --git a/pipenv/vendor/vistir/compat.py b/pipenv/vendor/vistir/compat.py index 417a785436..a44aafbe44 100644 --- a/pipenv/vendor/vistir/compat.py +++ b/pipenv/vendor/vistir/compat.py @@ -40,35 +40,35 @@ "_fs_decode_errors", ] -if sys.version_info >= (3, 5): +if sys.version_info >= (3, 5): # pragma: no cover from pathlib import Path -else: - from pathlib2 import Path +else: # pragma: no cover + from pipenv.vendor.pathlib2 import Path -if six.PY3: +if six.PY3: # pragma: no cover # Only Python 3.4+ is supported from functools import lru_cache, partialmethod from tempfile import NamedTemporaryFile from shutil import get_terminal_size from weakref import finalize -else: +else: # pragma: no cover # Only Python 2.7 is supported - from backports.functools_lru_cache import lru_cache + from pipenv.vendor.backports.functools_lru_cache import lru_cache from .backports.functools import partialmethod # type: ignore - from backports.shutil_get_terminal_size import get_terminal_size + from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size from .backports.surrogateescape import register_surrogateescape register_surrogateescape() NamedTemporaryFile = _NamedTemporaryFile - from backports.weakref import finalize # type: ignore + from pipenv.vendor.backports.weakref import finalize # type: ignore try: # Introduced Python 3.5 from json import JSONDecodeError -except ImportError: +except ImportError: # pragma: no cover JSONDecodeError = ValueError # type: ignore -if six.PY2: +if six.PY2: # pragma: no cover from io import BytesIO as StringIO @@ -98,7 +98,7 @@ def __init__(self, *args, **kwargs): super(FileExistsError, self).__init__(*args, **kwargs) -else: +else: # pragma: no cover from builtins import ( ResourceWarning, FileNotFoundError, @@ -139,7 +139,7 @@ def is_type_checking(): return TYPE_CHECKING -IS_TYPE_CHECKING = is_type_checking() +IS_TYPE_CHECKING = os.environ.get("MYPY_RUNNING", is_type_checking()) class TemporaryDirectory(object): @@ -351,23 +351,25 @@ def fs_decode(path): if path is None: raise TypeError("expected a valid path to decode") if isinstance(path, six.binary_type): - if six.PY2: - from array import array + import array - indexes = _invalid_utf8_indexes(array(str("B"), path)) + indexes = _invalid_utf8_indexes(array.array(str("B"), path)) + if six.PY2: return "".join( 
chunk.decode(_fs_encoding, _fs_decode_errors) for chunk in _chunks(path, indexes) ) + if indexes and os.name == "nt": + return path.decode(_fs_encoding, "surrogateescape") return path.decode(_fs_encoding, _fs_decode_errors) return path -if sys.version_info[0] < 3: +if sys.version_info[0] < 3: # pragma: no cover _fs_encode_errors = "surrogateescape" _fs_decode_errors = "surrogateescape" _fs_encoding = "utf-8" -else: +else: # pragma: no cover _fs_encoding = "utf-8" if sys.platform.startswith("win"): _fs_error_fn = None diff --git a/pipenv/vendor/vistir/environment.py b/pipenv/vendor/vistir/environment.py new file mode 100644 index 0000000000..b8490c001a --- /dev/null +++ b/pipenv/vendor/vistir/environment.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function + +from .compat import IS_TYPE_CHECKING + +MYPY_RUNNING = IS_TYPE_CHECKING diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py index ae7268608d..b2df8f977d 100644 --- a/pipenv/vendor/vistir/misc.py +++ b/pipenv/vendor/vistir/misc.py @@ -26,6 +26,7 @@ to_native_string, ) from .contextmanagers import spinner as spinner +from .environment import MYPY_RUNNING from .termcolors import ANSI_REMOVAL_RE, colorize if os.name != "nt": @@ -55,7 +56,13 @@ class WindowsError(OSError): ] +if MYPY_RUNNING: + from typing import Any, Dict, List, Optional, Union + from .spin import VistirSpinner + + def _get_logger(name=None, level="ERROR"): + # type: (Optional[str], str) -> logging.Logger if not name: name = __name__ if isinstance(level, six.string_types): @@ -72,6 +79,7 @@ def _get_logger(name=None, level="ERROR"): def shell_escape(cmd): + # type: (Union[str, List[str]]) -> str """Escape strings for use in :func:`~subprocess.Popen` and :func:`run`. This is a passthrough method for instantiating a :class:`~vistir.cmdparse.Script` @@ -82,6 +90,7 @@ def shell_escape(cmd): def unnest(elem): + # type: (Iterable) -> Any """Flatten an arbitrarily nested iterable :param elem: An iterable to flatten @@ -96,22 +105,27 @@ def unnest(elem): elem, target = tee(elem, 2) else: target = elem - for el in target: - if isinstance(el, Iterable) and not isinstance(el, six.string_types): - el, el_copy = tee(el, 2) - for sub in unnest(el_copy): - yield sub - else: - yield el + if not target or not _is_iterable(target): + yield target + else: + for el in target: + if isinstance(el, Iterable) and not isinstance(el, six.string_types): + el, el_copy = tee(el, 2) + for sub in unnest(el_copy): + yield sub + else: + yield el def _is_iterable(elem): - if getattr(elem, "__iter__", False): + # type: (Any) -> bool + if getattr(elem, "__iter__", False) or isinstance(elem, Iterable): return True return False def dedup(iterable): + # type: (Iterable) -> Iterable """Deduplicate an iterable object like iter(set(iterable)) but order-reserved. """ @@ -119,6 +133,7 @@ def dedup(iterable): def _spawn_subprocess(script, env=None, block=True, cwd=None, combine_stderr=True): + # type: (Union[str, List[str]], Optional[Dict[str, str], bool, Optional[str], bool]) -> subprocess.Popen from distutils.spawn import find_executable if not env: @@ -146,7 +161,7 @@ def _spawn_subprocess(script, env=None, block=True, cwd=None, combine_stderr=Tru # a "command" that is non-executable. See pypa/pipenv#2727. 
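+    # Editorial note: "not a valid Win32 application" failures surface here
+    # as WindowsError 193, which the except clause below retries with
+    # shell=True.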
try: return subprocess.Popen(cmd, **options) - except WindowsError as e: + except WindowsError as e: # pragma: no cover if getattr(e, "winerror", 9999) != 193: raise options["shell"] = True @@ -203,6 +218,25 @@ def get_stream_results(cmd_instance, verbose, maxlen, spinner=None, stdout_allow return stream_results +def _handle_nonblocking_subprocess(c, spinner=None): + # type: (subprocess.Popen, VistirSpinner) -> subprocess.Popen + try: + c.wait() + finally: + if c.stdout: + c.stdout.close() + if c.stderr: + c.stderr.close() + if spinner: + if c.returncode > 0: + spinner.fail(to_native_string("Failed...cleaning up...")) + if not os.name == "nt": + spinner.ok(to_native_string("✔ Complete")) + else: + spinner.ok(to_native_string("Complete")) + return c + + def _create_subprocess( cmd, env=None, @@ -225,9 +259,12 @@ def _create_subprocess( except Exception as exc: import traceback - formatted_tb = "".join(traceback.format_exception(*sys.exc_info())) # pragma: no cover + formatted_tb = "".join( + traceback.format_exception(*sys.exc_info()) + ) # pragma: no cover sys.stderr.write( # pragma: no cover - "Error while executing command %s:" % to_native_string(" ".join(cmd._parts)) # pragma: no cover + "Error while executing command %s:" + % to_native_string(" ".join(cmd._parts)) # pragma: no cover ) # pragma: no cover sys.stderr.write(formatted_tb) # pragma: no cover raise exc # pragma: no cover @@ -245,26 +282,17 @@ def _create_subprocess( spinner=spinner, stdout_allowed=write_to_stdout, ) - try: - c.wait() - finally: - if c.stdout: - c.stdout.close() - if c.stderr: - c.stderr.close() - if spinner: - if c.returncode > 0: - spinner.fail(to_native_string("Failed...cleaning up...")) - if not os.name == "nt": - spinner.ok(to_native_string("✔ Complete")) - else: - spinner.ok(to_native_string("Complete")) + _handle_nonblocking_subprocess(c, spinner) output = stream_results["stdout"] err = stream_results["stderr"] c.out = "\n".join(output) if output else "" c.err = "\n".join(err) if err else "" else: - c.out, c.err = c.communicate() + try: + c.out, c.err = c.communicate() + except (SystemExit, TimeoutError): + c.terminate() + c.out, c.err = c.communicate() if not block: c.wait() c.out = to_text("{0}".format(c.out)) if c.out else fs_str("") @@ -432,8 +460,8 @@ def to_bytes(string, encoding="utf-8", errors=None): else: return string.decode(unicode_name).encode(encoding, errors) elif isinstance(string, memoryview): - return bytes(string) - elif not isinstance(string, six.string_types): + return string.tobytes() + elif not isinstance(string, six.string_types): # pragma: no cover try: if six.PY3: return six.text_type(string).encode(encoding, errors) @@ -476,13 +504,13 @@ def to_text(string, encoding="utf-8", errors=None): string = six.text_type(string, encoding, errors) else: string = six.text_type(string) - elif hasattr(string, "__unicode__"): + elif hasattr(string, "__unicode__"): # pragma: no cover string = six.text_type(string) else: string = six.text_type(bytes(string), encoding, errors) else: string = string.decode(encoding, errors) - except UnicodeDecodeError: + except UnicodeDecodeError: # pragma: no cover string = " ".join(to_text(arg, encoding, errors) for arg in string) return string @@ -795,7 +823,7 @@ def seekable(self): def _isatty(stream): try: is_a_tty = stream.isatty() - except Exception: + except Exception: # pragma: no cover is_a_tty = False return is_a_tty @@ -812,6 +840,7 @@ def _isatty(stream): if os.name == "nt" or sys.platform.startswith("win"): if colorama is not None: + def 
_wrap_for_color(stream, color=None): try: cached = _color_stream_cache.get(stream) diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 8ea408f98d..76bdf7869e 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -17,17 +17,17 @@ from .backports.tempfile import _TemporaryFileWrapper from .compat import ( + IS_TYPE_CHECKING, + FileNotFoundError, Path, + PermissionError, ResourceWarning, TemporaryDirectory, - FileNotFoundError, - PermissionError, _fs_encoding, _NamedTemporaryFile, finalize, fs_decode, fs_encode, - IS_TYPE_CHECKING, ) if IS_TYPE_CHECKING: @@ -343,8 +343,19 @@ def set_write_bit(fn): user_sid = get_current_user() icacls_exe = _find_icacls_exe() or "icacls" from .misc import run + if user_sid: - _, err = run([icacls_exe, "/grant", "{0}:WD".format(user_sid), "''{0}''".format(fn), "/T", "/C", "/Q"]) + _, err = run( + [ + icacls_exe, + "/grant", + "{0}:WD".format(user_sid), + "''{0}''".format(fn), + "/T", + "/C", + "/Q", + ] + ) if not err: return @@ -390,7 +401,7 @@ def rmtree(directory, ignore_errors=False, onerror=None): raise -def _wait_for_files(path): +def _wait_for_files(path): # pragma: no cover """ Retry with backoff up to 1 second to delete files from a directory. @@ -448,7 +459,12 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except (OSError, IOError, FileNotFoundError, PermissionError) as e: + except ( + OSError, + IOError, + FileNotFoundError, + PermissionError, + ) as e: # pragma: no cover if e.errno in PERM_ERRORS: if e.errno == errno.ENOENT: return diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index 877ece82f7..1e67e482ef 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -19,18 +19,18 @@ try: import yaspin -except ImportError: +except ImportError: # pragma: no cover yaspin = None Spinners = None SpinBase = None -else: +else: # pragma: no cover import yaspin.spinners import yaspin.core Spinners = yaspin.spinners.Spinners SpinBase = yaspin.core.Yaspin -if os.name == "nt": +if os.name == "nt": # pragma: no cover def handler(signum, frame, spinner): """Signal handler, used to gracefully shut down the ``spinner`` instance @@ -44,7 +44,7 @@ def handler(signum, frame, spinner): sys.exit(0) -else: +else: # pragma: no cover def handler(signum, frame, spinner): """Signal handler, used to gracefully shut down the ``spinner`` instance @@ -92,7 +92,7 @@ def __exit__(self, exc_type, exc_val, tb): self._close_output_buffer() return False - def __getattr__(self, k): + def __getattr__(self, k): # pragma: no cover try: retval = super(DummySpinner, self).__getattribute__(k) except AttributeError: @@ -253,7 +253,7 @@ def hide_and_write(self, text, target=None): target.write(CLEAR_LINE) self._show_cursor(target=target) - def write(self, text): + def write(self, text): # pragma: no cover if not self.write_to_stdout: return self.write_err(text) stdout = self.stdout @@ -267,7 +267,7 @@ def write(self, text): stdout.write(text) self.out_buff.write(text) - def write_err(self, text): + def write_err(self, text): # pragma: no cover """Write error text in the terminal without breaking the spinner.""" stderr = self.stderr if self.stderr.closed: diff --git a/pipenv/vendor/yaspin/__version__.py b/pipenv/vendor/yaspin/__version__.py index f075dd36ab..23f00709c1 100644 --- a/pipenv/vendor/yaspin/__version__.py +++ b/pipenv/vendor/yaspin/__version__.py @@ -1 +1 @@ -__version__ = "0.14.1" +__version__ = "0.14.3" diff --git a/pipenv/vendor/yaspin/core.py 
b/pipenv/vendor/yaspin/core.py index dcaa9e4240..12960b3b74 100644 --- a/pipenv/vendor/yaspin/core.py +++ b/pipenv/vendor/yaspin/core.py @@ -82,6 +82,7 @@ def __init__( self._hide_spin = None self._spin_thread = None self._last_frame = None + self._stdout_lock = threading.Lock() # Signals @@ -253,43 +254,47 @@ def hide(self): thr_is_alive = self._spin_thread and self._spin_thread.is_alive() if thr_is_alive and not self._hide_spin.is_set(): - # set the hidden spinner flag - self._hide_spin.set() + with self._stdout_lock: + # set the hidden spinner flag + self._hide_spin.set() - # clear the current line - sys.stdout.write("\r") - self._clear_line() + # clear the current line + sys.stdout.write("\r") + self._clear_line() - # flush the stdout buffer so the current line can be rewritten to - sys.stdout.flush() + # flush the stdout buffer so the current line + # can be rewritten to + sys.stdout.flush() def show(self): """Show the hidden spinner.""" thr_is_alive = self._spin_thread and self._spin_thread.is_alive() if thr_is_alive and self._hide_spin.is_set(): - # clear the hidden spinner flag - self._hide_spin.clear() + with self._stdout_lock: + # clear the hidden spinner flag + self._hide_spin.clear() - # clear the current line so the spinner is not appended to it - sys.stdout.write("\r") - self._clear_line() + # clear the current line so the spinner is not appended to it + sys.stdout.write("\r") + self._clear_line() def write(self, text): """Write text in the terminal without breaking the spinner.""" # similar to tqdm.write() # https://pypi.python.org/pypi/tqdm#writing-messages - sys.stdout.write("\r") - self._clear_line() + with self._stdout_lock: + sys.stdout.write("\r") + self._clear_line() - _text = to_unicode(text) - if PY2: - _text = _text.encode(ENCODING) + _text = to_unicode(text) + if PY2: + _text = _text.encode(ENCODING) - # Ensure output is bytes for Py2 and Unicode for Py3 - assert isinstance(_text, builtin_str) + # Ensure output is bytes for Py2 and Unicode for Py3 + assert isinstance(_text, builtin_str) - sys.stdout.write("{0}\n".format(_text)) + sys.stdout.write("{0}\n".format(_text)) def ok(self, text="OK"): """Set Ok (success) finalizer to a spinner.""" @@ -312,7 +317,8 @@ def _freeze(self, final_text): # Should be stopped here, otherwise prints after # self._freeze call will mess up the spinner self.stop() - sys.stdout.write(self._last_frame) + with self._stdout_lock: + sys.stdout.write(self._last_frame) def _spin(self): while not self._stop_spin.is_set(): @@ -327,13 +333,13 @@ def _spin(self): out = self._compose_out(spin_phase) # Write - sys.stdout.write(out) - self._clear_line() - sys.stdout.flush() + with self._stdout_lock: + sys.stdout.write(out) + self._clear_line() + sys.stdout.flush() # Wait time.sleep(self._interval) - sys.stdout.write("\b") def _compose_color_func(self): fn = functools.partial( From aa2b1d2618105db9747b01e08b84722e551225dd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 20:39:38 -0400 Subject: [PATCH 33/81] Update piptools patch and re-lock Signed-off-by: Dan Ryan --- Pipfile.lock | 174 +++++++++++------- pipenv/patched/piptools/repositories/pypi.py | 4 +- setup.py | 4 +- .../vendoring/patches/patched/piptools.patch | 4 +- 4 files changed, 112 insertions(+), 74 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 5938db73a4..a208da76c4 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -27,6 +27,7 @@ "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", 
"sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.5" }, "appdirs": { @@ -55,6 +56,7 @@ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.1.0" }, "babel": { @@ -62,6 +64,7 @@ "sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669", "sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.6.0" }, "backports.functools-lru-cache": { @@ -110,6 +113,7 @@ "sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16", "sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.1.0" }, "bs4": { @@ -128,16 +132,16 @@ }, "cerberus": { "hashes": [ - "sha256:f5c2e048fb15ecb3c088d192164316093fcfa602a74b3386eefb2983aa7e800a" + "sha256:0be48fc0dc84f83202a5309c0aa17cd5393e70731a1698a50d118b762fbe6875" ], - "version": "==1.2" + "version": "==1.3.1" }, "certifi": { "hashes": [ - "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", - "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033" + "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", + "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae" ], - "version": "==2018.11.29" + "version": "==2019.3.9" }, "chardet": { "hashes": [ @@ -178,9 +182,9 @@ }, "distlib": { "hashes": [ - "sha256:57977cd7d9ea27986ec62f425630e4ddb42efe651ff80bc58ed8dbc3c7c21f19" + "sha256:ecb3d0e4f71d0fa7f38db6bcc276c7c9a1c6638a516d726495934a553eb3fbe0" ], - "version": "==0.2.8" + "version": "==0.2.9.post0" }, "docutils": { "hashes": [ @@ -195,6 +199,7 @@ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" ], + "markers": "python_version >= '2.7'", "version": "==0.3" }, "enum34": { @@ -209,17 +214,18 @@ }, "execnet": { "hashes": [ - "sha256:a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", - "sha256:fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83" + "sha256:027ee5d961afa01e97b90d6ccc34b4ed976702bc58e7f092b3c513ea288cb6d2", + "sha256:752a3786f17416d491f833a29217dda3ea4a471fc5269c492eebcee8cc4772d3" ], - "version": "==1.5.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.6.0" }, "first": { "hashes": [ - "sha256:3bb3de3582cb27071cfb514f00ed784dc444b7f96dc21e140de65fe00585c95e", - "sha256:41d5b64e70507d0c3ca742d68010a76060eea8a3d863e9b5130ab11a4a91aa0e" + "sha256:8d8e46e115ea8ac652c76123c0865e3ff18372aef6f03c22809ceefcea9dec86", + "sha256:ff285b08c55f8c97ce4ea7012743af2495c9f1291785f163722bd36f6af6d3bf" ], - "version": "==2.0.1" + "version": "==2.0.2" }, "flake8": { "hashes": [ @@ -234,6 +240,7 @@ "sha256:12bd5e41f372b2190e8d754b6e5829c2f11dbc764e10b30f57e59f829c9ca1da", "sha256:a94931c46a33469ec26f09b652bc88f55a8f5cc77807b90ca7bbafef1108fd7d" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.5.3" }, "flask": { @@ -279,6 +286,7 @@ 
"sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "incremental": { @@ -311,6 +319,7 @@ "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.0" }, "jedi": { @@ -323,10 +332,10 @@ }, "jinja2": { "hashes": [ - "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", - "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", + "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" ], - "version": "==2.10" + "version": "==2.10.1" }, "markupsafe": { "hashes": [ @@ -359,6 +368,7 @@ "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "mccabe": { @@ -370,10 +380,11 @@ }, "mock": { "hashes": [ - "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1", - "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba" + "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", + "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], - "version": "==2.0.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==3.0.5" }, "more-itertools": { "hashes": [ @@ -395,14 +406,15 @@ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.0" }, "parso": { "hashes": [ - "sha256:4580328ae3f548b358f4901e38c0578229186835f0fa0846e47369796dd5bcc9", - "sha256:68406ebd7eafe17f8e40e15a84b56848eccbf27d7c1feb89e93d8fca395706db" + "sha256:17cc2d7a945eb42c3569d4564cdf49bde221bc2b552af3eca9c1aad517dcdd33", + "sha256:2e9574cb12e7112a87253e14e2c380ce312060269d04bd018478a3c92ea9a376" ], - "version": "==0.3.4" + "version": "==0.4.0" }, "parver": { "hashes": [ @@ -428,10 +440,10 @@ }, "pbr": { "hashes": [ - "sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843", - "sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824" + "sha256:6901995b9b686cb90cceba67a0f6d4d14ae003cd59bc12beb61549bdfbe3bc89", + "sha256:d950c64aeea5456bbd147468382a5bb77fe692c13c9f00f0219814ce5b642755" ], - "version": "==5.1.3" + "version": "==5.2.0" }, "pep517": { "hashes": [ @@ -445,6 +457,7 @@ "sha256:3bc24ec050a6b9eea35419467237e4f47eaf806dadc9999bf887355c377edea7", "sha256:edb4cf3c509eab2f36b55c1ac1a59a4c485ccd537cc87934d74950880f641256" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.3.2" }, "pipenv": { @@ -467,20 +480,23 @@ "sha256:c0e3553c1e581d8423daccbd825789c6e7f29b7d9e00e5331b12e1642a1a26d3", "sha256:dde5d525cf5f0cbad4d938c83b93db17887918daf63c13eafed257c4f61b07b4" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.2.2" }, "pluggy": { "hashes": [ - 
"sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", - "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" + "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180", + "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a" ], - "version": "==0.9.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.11.0" }, "py": { "hashes": [ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.8.0" }, "pycodestyle": { @@ -488,6 +504,7 @@ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.5.0" }, "pyflakes": { @@ -495,6 +512,7 @@ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.1.1" }, "pygments": { @@ -506,17 +524,19 @@ }, "pyparsing": { "hashes": [ - "sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a", - "sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3" + "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a", + "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03" ], - "version": "==2.3.1" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.4.0" }, "pytest": { "hashes": [ - "sha256:3f193df1cfe1d1609d4c583838bea3d532b18d6160fd3f55c9447fdca30848ec", - "sha256:e246cf173c01169b9617fc07264b7b1316e78d7a650055235d6d897bc80d9660" + "sha256:1a8aa4fa958f8f451ac5441f3ac130d9fc86ea38780dd2715e6d5c5882700b24", + "sha256:b8bf138592384bd4e87338cb0f256bf5f615398a649d4bd83915f0e4047a5ca6" ], - "version": "==3.10.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==4.5.0" }, "pytest-forked": { "hashes": [ @@ -539,10 +559,11 @@ }, "pytest-xdist": { "hashes": [ - "sha256:4a201bb3ee60f5dd6bb40c5209d4e491cecc4d5bafd656cfb10f86178786e568", - "sha256:d03d1ff1b008458ed04fa73e642d840ac69b4107c168e06b71037c62d7813dd4" + "sha256:b0bb4b0293ee8657b9eb3ff334a3b6aac4db74fd4a86b81e1982c879237a47eb", + "sha256:f83a485293e81fd57c8a5a85a3f12473a532c5ca7dec518857cbb72766bb526c" ], - "version": "==1.26.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.28.0" }, "pytoml": { "hashes": [ @@ -570,6 +591,7 @@ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" ], + "index": "pypi", "version": "==2.21.0" }, "requests-toolbelt": { @@ -581,10 +603,11 @@ }, "requirementslib": { "hashes": [ - "sha256:04c19bfe6f2c4dbfe3cc5115744c2079b0f3ce52c61c9d130a1ccf2b9896f812", - "sha256:fdfc75d0ce418e80fa3b573ff078a5732ec1f11415afc2d4280809a1b5b2575d" + "sha256:a5bcff2861eea9358e90d6a4234f0bf4ad0ba730f86a4ea4680f53365b7b4735", + "sha256:ae0c2fce1b33c9c7b171895ab11472bd7be9c45f6214aad97ceaf83511d78d93" ], - "version": "==1.4.2" + "markers": "python_version >= '2.7' and python_version not in 
'3.0, 3.1, 3.2, 3.3'", + "version": "==1.5.0" }, "resolvelib": { "hashes": [ @@ -622,6 +645,7 @@ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1'", "version": "==1.12.0" }, "snowballstemmer": { @@ -633,10 +657,10 @@ }, "soupsieve": { "hashes": [ - "sha256:afa56bf14907bb09403e5d15fbed6275caa4174d36b975226e3b67a3bb6e2c4b", - "sha256:eaed742b48b1f3e2d45ba6f79401b2ed5dc33b2123dfe216adb90d4bfa0ade26" + "sha256:6898e82ecb03772a0d82bd0d0a10c0d6dcc342f77e0701d0ec4a8271be465ece", + "sha256:b20eff5e564529711544066d7dc0f7661df41232ae263619dede5059799cdfca" ], - "version": "==1.8" + "version": "==1.9.1" }, "sphinx": { "hashes": [ @@ -682,6 +706,7 @@ "sha256:d6506342615d051bc961f70bfcfa3d29b6616cc08a3ddfd4bc24196f16fd4ec2", "sha256:f077456d35303e7908cc233b340f71e0bec96f63429997f38ca9272b7d64029e" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.5.3" }, "towncrier": { @@ -692,10 +717,11 @@ }, "tqdm": { "hashes": [ - "sha256:d385c95361699e5cf7622485d9b9eae2d4864b21cd5a2374a9c381ffed701021", - "sha256:e22977e3ebe961f72362f6ddfb9197cc531c9737aaf5f607ef09740c849ecd05" + "sha256:0a860bf2683fdbb4812fe539a6c22ea3f1777843ea985cb8c3807db448a0f7ab", + "sha256:e288416eecd4df19d12407d0c913cbf77aa8009d7fddb18f632aded3bdbdda6b" ], - "version": "==4.31.1" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1'", + "version": "==4.32.1" }, "twine": { "hashes": [ @@ -716,35 +742,45 @@ }, "urllib3": { "hashes": [ - "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", - "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" ], - "version": "==1.24.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", + "version": "==1.24.3" }, "virtualenv": { "hashes": [ - "sha256:6aebaf4dd2568a0094225ebbca987859e369e3e5c22dc7d52e5406d504890417", - "sha256:984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39" + "sha256:99acaf1e35c7ccf9763db9ba2accbca2f4254d61d1912c5ee364f9cc4a8942a0", + "sha256:fe51cdbf04e5d8152af06c075404745a7419de27495a83f0d72518ad50be3ce8" ], - "version": "==16.4.3" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==16.6.0" }, "virtualenv-clone": { "hashes": [ - "sha256:217bd3f0880c9f85672c0bcc9ad9e0354ab7dfa89c2f117e63aa878b4279f5bf", - "sha256:316c8a05432a7adb5e461709759aca18c51433ffc2c33e2e80c9e51c452d339f", - "sha256:f2a07ed255f3abaceef8c8442512d8cdb2ba9f867e212d8a51680c7790a85033" + "sha256:532f789a5c88adf339506e3ca03326f20ee82fd08ee5586b44dc859b5b4468c5", + "sha256:c88ae171a11b087ea2513f260cdac9232461d8e9369bcd1dc143fc399d220557" ], - "version": "==0.5.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.5.3" }, "vistir": { "extras": [ "spinner" ], "hashes": [ - "sha256:1a3d16d541de7ff098037260506a9efc5f6967176137988bd2cbfdd13b240ba0", - "sha256:68896b279f64ff078e06ffd41f77181ef7cdedbeaa5f453cae3cfdd97d41dbcf" + "sha256:00af96b75157b299616f47657ed34368e92e01d039100368c9dcd94897e3c109", + "sha256:bbe040ce656f1de9b5f75c953abe49af4d1ba6fdf8f1f4b8db3e63cfd2dad24a" + ], + "markers": "python_version >= 
'2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.4.1" + }, + "wcwidth": { + "hashes": [ + "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", + "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" ], - "version": "==0.3.1" + "version": "==0.1.7" }, "webencodings": { "hashes": [ @@ -755,24 +791,26 @@ }, "werkzeug": { "hashes": [ - "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c", - "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b" + "sha256:865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", + "sha256:a0b915f0815982fb2a09161cb8f31708052d0951c3ba433ccc5e1aa276507ca6" ], - "version": "==0.14.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.15.4" }, "wheel": { "hashes": [ - "sha256:66a8fd76f28977bb664b098372daef2b27f60dc4d1688cfab7b37a09448f0e9d", - "sha256:8eb4a788b3aec8abf5ff68d4165441bc57420c9f64ca5f471f58c3969fe08668" + "sha256:5e79117472686ac0c4aef5bad5172ea73a1c2d1646b808c35926bd26bdfb0c08", + "sha256:62fcfa03d45b5b722539ccbc07b190e4bfff4bb9e3a4d470dd9f6a0981002565" ], - "version": "==0.33.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.33.4" }, "yaspin": { "hashes": [ - "sha256:441f8a6761e347652d04614899fd0a9cfda7439e2d5682e664bd31230c656176", - "sha256:d3ebcf8162e0ef8bb5484b8751d5b6d2fbf0720112c81f64614c308576a03b1d" + "sha256:8e6d2e2b207ba18510f190e04a25273a32d1f192af9c9a77ebe46deaca799dfa", + "sha256:94b7602f0dc59d26a15e63cefff6aaf644c58dd77fc4e1ef675d3ba2c302ed06" ], - "version": "==0.14.1" + "version": "==0.14.3" } } } diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index 4e44b90348..9d81bd5542 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -14,8 +14,8 @@ from packaging.specifiers import SpecifierSet, Specifier os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") +import pip_shims from pip_shims.shims import VcsSupport, WheelCache, InstallationError -from pip_shims.shims import Resolver as PipResolver from .._compat import ( @@ -299,7 +299,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache): reqset = RequirementSet() ireq.is_direct = True # reqset.add_requirement(ireq) - resolver = PipResolver(**resolver_kwargs) + resolver = pip_shims.shims.Resolver(**resolver_kwargs) resolver.require_hashes = False results = resolver._resolve_one(reqset, ireq) diff --git a/setup.py b/setup.py index 3fb4859106..34c43a9768 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ required = [ "pip>=9.0.1", "certifi", - "setuptools>=36.2.1", + "setuptools>=41.0.0", "virtualenv-clone>=0.2.5", "virtualenv", 'enum34; python_version<"3"' @@ -131,7 +131,7 @@ def run(self): setup_requires=["invoke", "parver", ], install_requires=required, extras_require={ - "test": ["pytest<4.0", "pytest-tap", "pytest-xdist", "flaky", "mock"], + "test": ["pytest", "pytest-tap", "pytest-xdist", "flaky", "mock"], "dev": ["towncrier", "bs4"], }, include_package_data=True, diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 3d3cb99b77..84259ad369 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -162,8 +162,8 @@ index e54ae08..75b8208 100644 +from packaging.specifiers import SpecifierSet, Specifier + 
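The pypi.py hunk above, mirrored in the vendoring patch shown here, drops the import-time alias ``from pip_shims.shims import Resolver as PipResolver`` in favor of attribute access at the call site. The point is ordering: the shims must not be materialized until after ``PIP_SHIMS_BASE_MODULE`` has been pointed at pipenv's patched pip. Roughly:

    import os

    # Must be set before pip_shims resolves anything, otherwise the shims
    # bind to the system pip instead of pipenv's patched copy.
    os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip")

    import pip_shims

    def make_resolver(**resolver_kwargs):
        # Attribute access defers the lookup to the call, rather than
        # freezing whatever class was shimmed at import time.
        return pip_shims.shims.Resolver(**resolver_kwargs)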
+os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") ++import pip_shims.shims +from pip_shims.shims import VcsSupport, WheelCache, InstallationError -+from pip_shims.shims import Resolver as PipResolver + + from .._compat import ( @@ -400,7 +400,7 @@ index e54ae08..75b8208 100644 ireq.is_direct = True - reqset.add_requirement(ireq) + # reqset.add_requirement(ireq) - resolver = PipResolver(**resolver_kwargs) + resolver = pip_shims.shims.Resolver(**resolver_kwargs) resolver.require_hashes = False results = resolver._resolve_one(reqset, ireq) - reqset.cleanup_files() From 204f6894e5568b3bc3d05a36dbea5e45b17dcef5 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 18 Apr 2019 18:04:17 -0400 Subject: [PATCH 34/81] Refactor resolver into resolver file Signed-off-by: Dan Ryan --- pipenv/resolver.py | 4 --- pipenv/utils.py | 70 ---------------------------------------------- 2 files changed, 74 deletions(-) diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 4f4df8a55a..fa76765ee3 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -783,10 +783,6 @@ def main(): all(getattr(stream, method, None) for stream in [sys.stdout, sys.stderr] for method in ["write", "isatty"]) and all(stream.isatty() for stream in [sys.stdout, sys.stderr]) ): - # stderr_wrapper = colorama.AnsiToWin32(sys.stderr, autoreset=False, convert=None, strip=None) - # stdout_wrapper = colorama.AnsiToWin32(sys.stdout, autoreset=False, convert=None, strip=None) - # sys.stderr = stderr_wrapper.stream - # sys.stdout = stdout_wrapper.stream colorama.init(wrap=False) elif os.name != "nt": colorama.init() diff --git a/pipenv/utils.py b/pipenv/utils.py index 3d526974f3..6174d015a8 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -911,76 +911,6 @@ def actually_resolve_deps( hashes = resolver.resolve_hashes() resolver.resolve_constraints() results = resolver.clean_results() - # constraints, skipped, index_lookup, markers_lookup = Resolver.get_metadata( - # deps, index_lookup, markers_lookup, project, sources, - # ) - # resolver = Resolver(constraints, req_dir, project, sources, clear=clear, pre=pre) - # resolved_tree = resolver.resolve() - # hashes = resolver.resolve_hashes() - # reqs = [(Requirement.from_ireq(ireq), ireq) for ireq in resolved_tree] - # results = {} - # for req, ireq in reqs: - # if (req.vcs and req.editable and not req.is_direct_url): - # continue - # collected_hashes = resolver.collect_hashes(ireq) - # if collected_hashes: - # req = req.add_hashes(collected_hashes) - # elif resolver._should_include_hash(ireq): - # existing_hashes = hashes.get(ireq, set()) - # discovered_hashes = existing_hashes | resolver.get_hash(ireq) - # if discovered_hashes: - # req = req.add_hashes(discovered_hashes) - # resolver.hashes[ireq] = discovered_hashes - # if req.specifiers: - # version = str(req.get_version()) - # else: - # version = None - # index = index_lookup.get(req.normalized_name) - # markers = markers_lookup.get(req.normalized_name) - # req.index = index - # name, pf_entry = req.pipfile_entry - # name = pep423_name(req.name) - # entry = {} - # if isinstance(pf_entry, six.string_types): - # entry["version"] = pf_entry.lstrip("=") - # else: - # entry.update(pf_entry) - # if version is not None: - # entry["version"] = version - # if req.line_instance.is_direct_url: - # entry["file"] = req.req.uri - # if collected_hashes: - # entry["hashes"] = sorted(set(collected_hashes)) - # entry["name"] = name - # if index: # and index != next(iter(project.sources), {}).get("name"): - # entry.update({"index": index}) - 
# if markers: - # entry.update({"markers": markers}) - # entry = translate_markers(entry) - # if name in results: - # results[name].update(entry) - # else: - # results[name] = entry - # for k in list(skipped.keys()): - # req = Requirement.from_pipfile(k, skipped[k]) - # ref = None - # if req.is_vcs: - # ref = req.commit_hash - # ireq = req.as_ireq() - # entry = skipped[k].copy() - # entry["name"] = req.name - # ref = ref if ref is not None else entry.get("ref") - # if ref: - # entry["ref"] = ref - # if resolver._should_include_hash(ireq): - # collected_hashes = resolver.collect_hashes(ireq) - # if collected_hashes: - # entry["hashes"] = sorted(set(collected_hashes)) - # if k in results: - # results[k].update(entry) - # else: - # results[k] = entry - # results = list(results.values()) for warning in warning_list: _show_warning(warning.message, warning.category, warning.filename, warning.lineno, warning.line) From e93ce98cb751b5815ac340e93c48f3a209e4124f Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 19 Apr 2019 02:57:01 -0400 Subject: [PATCH 35/81] Update vendored deps Signed-off-by: Dan Ryan --- Pipfile | 11 +------ Pipfile.lock | 91 ++++++++++++++++++++++++++-------------------------- 2 files changed, 47 insertions(+), 55 deletions(-) diff --git a/Pipfile b/Pipfile index 8de9428f38..399a089778 100644 --- a/Pipfile +++ b/Pipfile @@ -1,22 +1,13 @@ [dev-packages] -pipenv = {path = ".", editable = true, extras = ["test"]} -"flake8" = ">=3.3.0,<4" -sphinx = "<=1.5.5" -twine = "*" +pipenv = {path = ".", editable = true, extras = ["tests", "dev"]} sphinx-click = "*" click = "*" pytest_pypi = {path = "./tests/pytest-pypi", editable = true} stdeb = {version="*", markers="sys_platform == 'linux'"} -black = {version="*", markers="python_version >= '3.6'"} -pytz = "*" -towncrier = {git = "https://github.com/hawkowl/towncrier.git", editable = true, ref = "master"} -parver = "*" -invoke = "*" jedi = "*" isort = "*" rope = "*" passa = {editable = true, git = "https://github.com/sarugaku/passa.git"} -bs4 = "*" [packages] diff --git a/Pipfile.lock b/Pipfile.lock index a208da76c4..5dc9d19b69 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "0cdfbd95f33a9edb69dede4bc868f24f7b770ba6b06ce73f6de5f175544e996d" + "sha256": "ed7727922609f3e42737051e7058e8aa7dc64325eee72046e44a6a14db900b02" }, "pipfile-spec": 6, "requires": {}, @@ -101,12 +101,11 @@ }, "black": { "hashes": [ - "sha256:817243426042db1d36617910df579a54f1afd659adb96fc5032fcf4b36209739", - "sha256:e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5" + "sha256:09a9dcb7c46ed496a9850b76e4e825d6049ecd38b611f1224857a79bd985a8cf", + "sha256:68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c" ], - "index": "pypi", "markers": "python_version >= '3.6'", - "version": "==18.9b0" + "version": "==19.3b0" }, "bleach": { "hashes": [ @@ -120,7 +119,6 @@ "hashes": [ "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a" ], - "index": "pypi", "version": "==0.0.1" }, "cached-property": { @@ -174,12 +172,6 @@ "markers": "python_version < '3.2'", "version": "==3.7.1" }, - "cursor": { - "hashes": [ - "sha256:33f279a17789c04efd27a92501a0dad62bb011f8a4cdff93867c798d26508940" - ], - "version": "==1.3.4" - }, "distlib": { "hashes": [ "sha256:ecb3d0e4f71d0fa7f38db6bcc276c7c9a1c6638a516d726495934a553eb3fbe0" @@ -229,11 +221,11 @@ }, "flake8": { "hashes": [ - "sha256:c3ba1e130c813191db95c431a18cb4d20a468e98af7a77e2181b68574481ad36", - 
"sha256:fd9ddf503110bf3d8b1d270e8c673aab29ccb3dd6abf29bae1f54e5116ab4a91" + "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661", + "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8" ], - "index": "pypi", - "version": "==3.7.5" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==3.7.7" }, "flaky": { "hashes": [ @@ -302,17 +294,15 @@ "sha256:dc492f8f17a0746e92081aec3f86ae0b4750bf41607ea2ad87e5a7b5705121b7", "sha256:eb6f9262d4d25b40330fb21d1e99bf0f85011ccc3526980f8a3eaedd4b43892e" ], - "index": "pypi", "version": "==1.2.0" }, "isort": { "hashes": [ - "sha256:1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", - "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", - "sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" + "sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43", + "sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a" ], "index": "pypi", - "version": "==4.3.4" + "version": "==4.3.17" }, "itsdangerous": { "hashes": [ @@ -324,11 +314,11 @@ }, "jedi": { "hashes": [ - "sha256:571702b5bd167911fe9036e5039ba67f820d6502832285cde8c881ab2b2149fd", - "sha256:c8481b5e59d34a5c7c42e98f6625e633f6ef59353abea6437472c7ec2093f191" + "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b", + "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c" ], "index": "pypi", - "version": "==0.13.2" + "version": "==0.13.3" }, "jinja2": { "hashes": [ @@ -421,14 +411,13 @@ "sha256:1b37a691af145a3a193eff269d53ba5b2ab16dfbb65d47d85360755919f5fe4b", "sha256:72d056b8f8883ac90eef5554a9c8a47fac39d3b66479f3d2c8d5bc21b849cdba" ], - "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.2.1" }, "passa": { "editable": true, "git": "https://github.com/sarugaku/passa.git", - "ref": "a2ba0b30c86339cae5ef3a03046fc9c583452c40", - "version": "==0.3.1.dev0" + "ref": "a2ba0b30c86339cae5ef3a03046fc9c583452c40" }, "pathlib2": { "hashes": [ @@ -462,8 +451,11 @@ }, "pipenv": { "editable": true, - "path": ".", - "version": "==2018.11.27.dev0" + "extras": [ + "dev", + "tests" + ], + "path": "." 
}, "pkginfo": { "hashes": [ @@ -547,8 +539,7 @@ }, "pytest-pypi": { "editable": true, - "path": "./tests/pytest-pypi", - "version": "==0.1.1" + "path": "./tests/pytest-pypi" }, "pytest-tap": { "hashes": [ @@ -573,11 +564,10 @@ }, "pytz": { "hashes": [ - "sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9", - "sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c" + "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda", + "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141" ], - "index": "pypi", - "version": "==2018.9" + "version": "==2019.1" }, "readme-renderer": { "hashes": [ @@ -618,10 +608,12 @@ }, "rope": { "hashes": [ - "sha256:031eb54b3eeec89f4304ede816995ed2b93a21e6fba16bd02aff10a0d6c257b7" + "sha256:6b728fdc3e98a83446c27a91fc5d56808a004f8beab7a31ab1d7224cecc7d969", + "sha256:c5c5a6a87f7b1a2095fb311135e2a3d1f194f5ecb96900fdd0a9100881f48aaf", + "sha256:f0dcf719b63200d492b85535ebe5ea9b29e0d0b8aebeb87fe03fc1a65924fdaf" ], "index": "pypi", - "version": "==0.12.0" + "version": "==0.14.0" }, "scandir": { "hashes": [ @@ -664,11 +656,11 @@ }, "sphinx": { "hashes": [ - "sha256:11f271e7a9398385ed730e90f0bb41dc3815294bdcd395b46ed2d033bc2e7d87", - "sha256:4064ea6c56feeb268838cb8fbbee507d0c3d5d92fa63a7df935a916b52c9e2f5" + "sha256:9f3e17c64b34afc653d7c5ec95766e03043cc6d80b0de224f59b6b6e19d37c3c", + "sha256:c7658aab75c920288a8cf6f09f244c6cfdae30d82d803ac1634d9f223a80ca08" ], - "index": "pypi", - "version": "==1.5.5" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.8.5" }, "sphinx-click": { "hashes": [ @@ -678,6 +670,14 @@ "index": "pypi", "version": "==2.0.1" }, + "sphinxcontrib-websupport": { + "hashes": [ + "sha256:4044751a075b6560f155c96f9fec6bc5198cd5307e5db9f77c7b1c5247ac9a09", + "sha256:c1b918b1b41cde045cdb9755941086b4ce4ebbfd7bff41d10ffb6d325779cbf9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.1.1.dev20190321" + }, "stdeb": { "hashes": [ "sha256:0ed2c2cc6b8ba21da7d646c6f37ca60b22e9e4950e3cec6bcd9c2e7e57e3747e" @@ -710,9 +710,10 @@ "version": "==0.5.3" }, "towncrier": { - "editable": true, - "git": "https://github.com/hawkowl/towncrier.git", - "ref": "ecd438c9c0ef132a92aba2eecc4dc672ccf9ec63", + "hashes": [ + "sha256:48251a1ae66d2cf7e6fa5552016386831b3e12bb3b2d08eb70374508c17a8196", + "sha256:de19da8b8cb44f18ea7ed3a3823087d2af8fcf497151bb9fd1e1b092ff56ed8d" + ], "version": "==19.2.0" }, "tqdm": { @@ -728,7 +729,7 @@ "sha256:0fb0bfa3df4f62076cab5def36b1a71a2e4acb4d1fa5c97475b048117b1a6446", "sha256:d6c29c933ecfc74e9b1d9fa13aa1f87c5d5770e119f5a4ce032092f0ff5b14dc" ], - "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.13.0" }, "typing": { From 37a19819c81b7120a208abcbc2238b885ca8bbf6 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 19 Apr 2019 02:58:24 -0400 Subject: [PATCH 36/81] Update echo calls, exception formats, dependencies Signed-off-by: Dan Ryan --- pipenv/__init__.py | 2 +- pipenv/exceptions.py | 95 ++++++++++++++++++++++---------------------- pipenv/resolver.py | 16 ++++---- pipenv/utils.py | 21 +++++----- setup.py | 22 +++++++--- 5 files changed, 83 insertions(+), 73 deletions(-) diff --git a/pipenv/__init__.py b/pipenv/__init__.py index 7128163aa8..a83f94e8b1 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -40,7 +40,7 @@ from .vendor import colorama replace_with_text_stream("stdout") 
replace_with_text_stream("stderr") -colorama.init(wrap=False) +# colorama.init(wrap=False) from .cli import cli from . import resolver diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 495a46bfbb..23a1eb075f 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -11,19 +11,19 @@ from . import environments from ._compat import decode_for_output from .patched import crayons -from .vendor.click._compat import get_text_stderr from .vendor.click.exceptions import ( Abort, BadOptionUsage, BadParameter, ClickException, Exit, FileError, MissingParameter, UsageError ) from .vendor.click.types import Path -from .vendor.click.utils import echo as click_echo +from .vendor.vistir.misc import echo as click_echo import vistir KNOWN_EXCEPTIONS = { "PermissionError": "Permission denied:", } + def handle_exception(exc_type, exception, traceback, hook=sys.excepthook): if environments.is_verbose() or not issubclass(exc_type, ClickException): hook(exc_type, exception, traceback) @@ -60,7 +60,7 @@ def __init__(self, message=None, **kwargs): def show(self, file=None): if file is None: - file = get_text_stderr() + file = vistir.misc.get_text_stderr() if self.extra: if isinstance(self.extra, six.string_types): self.extra = [self.extra,] @@ -84,7 +84,7 @@ def __init__(self, cmd, out="", err="", exit_code=1): def show(self, file=None): if file is None: - file = get_text_stderr() + file = vistir.misc.get_text_stderr() click_echo("{0} {1}".format( crayons.red("Error running command: "), crayons.white(decode_for_output("$ {0}".format(self.cmd), file), bold=True) @@ -108,12 +108,12 @@ def __init__(self, contents="", error_text=""): def show(self, file=None): if file is None: - file = get_text_stderr() + file = vistir.misc.get_text_stderr() message = "{0}\n{1}".format( crayons.white("Failed parsing JSON results:", bold=True), decode_for_output(self.message.strip(), file) ) - click_echo(self.message, err=True) + click_echo(message, err=True) if self.error_text: click_echo("{0} {1}".format( crayons.white("ERROR TEXT:", bold=True), @@ -136,7 +136,7 @@ def __init__(self, message=None, ctx=None, **kwargs): def show(self, file=None): if file is None: - file = get_text_stderr() + file = vistir.misc.get_text_stderr() color = None if self.ctx is not None: color = self.ctx.color @@ -175,7 +175,7 @@ def __init__(self, filename, message=None, **kwargs): def show(self, file=None): if file is None: - file = get_text_stderr() + file = vistir.misc.get_text_stderr() if self.extra: if isinstance(self.extra, six.string_types): self.extra = [self.extra,] @@ -187,13 +187,16 @@ def show(self, file=None): class PipfileNotFound(PipenvFileError): def __init__(self, filename="Pipfile", extra=None, **kwargs): extra = kwargs.pop("extra", []) - message = ("{0} {1}".format( + message = ( + "{0} {1}".format( crayons.red("Aborting!", bold=True), - crayons.white("Please ensure that the file exists and is located in your" - " project root directory.", bold=True) + crayons.white( + "Please ensure that the file exists and is located in your" + " project root directory.", bold=True + ) ) ) - super(PipfileNotFound, self).__init__(filename, message=decode_for_output(message), extra=extra, **kwargs) + super(PipfileNotFound, self).__init__(filename, message=message, extra=extra, **kwargs) class LockfileNotFound(PipenvFileError): @@ -204,7 +207,7 @@ def __init__(self, filename="Pipfile.lock", extra=None, **kwargs): crayons.red("$ pipenv lock", bold=True), crayons.white("before you can continue.", bold=True) ) - super(LockfileNotFound, 
self).__init__(filename, message=decode_for_output(message), extra=extra, **kwargs) + super(LockfileNotFound, self).__init__(filename, message=message, extra=extra, **kwargs) class DeployException(PipenvUsageError): @@ -212,13 +215,13 @@ def __init__(self, message=None, **kwargs): if not message: message = crayons.normal("Aborting deploy", bold=True) extra = kwargs.pop("extra", []) - PipenvUsageError.__init__(self, message=decode_for_output(message), extra=extra, **kwargs) + PipenvUsageError.__init__(self, message=message, extra=extra, **kwargs) class PipenvOptionsError(PipenvUsageError): def __init__(self, option_name, message=None, ctx=None, **kwargs): extra = kwargs.pop("extra", []) - PipenvUsageError.__init__(self, message=decode_for_output(message), ctx=ctx, **kwargs) + PipenvUsageError.__init__(self, message=message, ctx=ctx, **kwargs) self.extra = extra self.option_name = option_name @@ -245,7 +248,7 @@ def __init__(self, hint=None, **kwargs): hint = "{0} {1}".format(crayons.red("ERROR (PACKAGE NOT INSTALLED):"), hint) filename = project.pipfile_location extra = kwargs.pop("extra", []) - PipenvFileError.__init__(self, filename, decode_for_output(hint), extra=extra, **kwargs) + PipenvFileError.__init__(self, filename, hint, extra=extra, **kwargs) class SetupException(PipenvException): @@ -261,7 +264,7 @@ def __init__(self, message=None, **kwargs): "There was an unexpected error while activating your virtualenv. " "Continuing anyway..." ) - PipenvException.__init__(self, decode_for_output(message), **kwargs) + PipenvException.__init__(self, message, **kwargs) class VirtualenvActivationException(VirtualenvException): @@ -272,7 +275,7 @@ def __init__(self, message=None, **kwargs): "not activated. Continuing anyway…" ) self.message = message - VirtualenvException.__init__(self, decode_for_output(message), **kwargs) + VirtualenvException.__init__(self, message, **kwargs) class VirtualenvCreationException(VirtualenvException): @@ -280,23 +283,26 @@ def __init__(self, message=None, **kwargs): if not message: message = "Failed to create virtual environment." 
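The thread running through this exceptions.py rewrite: messages are no longer pre-encoded with ``decode_for_output`` at construction time, and every error type keeps the same shape, a ClickException whose ``show()`` prints its ``extra`` hint lines before the formatted message, all on stderr. Reduced to a skeleton (hypothetical class, simplified from the code above):

    from click import echo
    from click.exceptions import ClickException

    class HintedError(ClickException):
        """Simplified sketch of the pattern pipenv's exception types follow."""

        def __init__(self, message, extra=None):
            ClickException.__init__(self, message)
            self.extra = extra or []

        def show(self, file=None):
            # Hints first, then the message itself, all on stderr so stdout
            # remains safe to pipe.
            for line in self.extra:
                echo(line, err=True)
            echo(self.format_message(), err=True)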
self.message = message - VirtualenvException.__init__(self, decode_for_output(message), **kwargs) + VirtualenvException.__init__(self, message, **kwargs) class UninstallError(PipenvException): def __init__(self, package, command, return_values, return_code, **kwargs): - extra = [crayons.blue("Attempted to run command: {0}".format( - crayons.yellow("$ {0!r}".format(command), bold=True) - )),] + extra = [ + "{0} {1}".format( + crayons.blue("Attempted to run command: "), + crayons.yellow("$ {0!r}".format(command), bold=True + ) + )] extra.extend([crayons.blue(line.strip()) for line in return_values.splitlines()]) if isinstance(package, (tuple, list, set)): package = " ".join(package) message = "{0!s} {1!s}...".format( crayons.normal("Failed to uninstall package(s)"), - crayons.yellow(str(package), bold=True) + crayons.yellow("{0!s}".format(package), bold=True) ) self.exit_code = return_code - PipenvException.__init__(self, message=decode_for_output(message), extra=extra) + PipenvException.__init__(self, message=message, extra=extra) self.extra = extra @@ -304,36 +310,34 @@ class InstallError(PipenvException): def __init__(self, package, **kwargs): package_message = "" if package is not None: - package_message = crayons.normal("Couldn't install package {0}\n".format( - crayons.white(package, bold=True) - )) - message = "{0} {1} {2}".format( - crayons.red("ERROR:", bold=True), - package_message, + package_message = "Couldn't install package: {0}\n".format( + crayons.white("{0!s}".format(package), bold=True) + ) + message = "{0} {1}".format( + "{0}".format(package_message), crayons.yellow("Package installation failed...") ) extra = kwargs.pop("extra", []) - PipenvException.__init__(self, message=decode_for_output(message), extra=extra, **kwargs) + PipenvException.__init__(self, message=message, extra=extra, **kwargs) class CacheError(PipenvException): def __init__(self, path, **kwargs): - message = "{0} {1} {2}\n{0}".format( - crayons.red("ERROR:", bold=True), + message = "{0} {1}\n{2}".format( crayons.blue("Corrupt cache file"), - crayons.white(path), + crayons.white("{0!s}".format(path)), crayons.white('Consider trying "pipenv lock --clear" to clear the cache.') ) - PipenvException.__init__(self, message=decode_for_output(message)) + PipenvException.__init__(self, message=message) class DependencyConflict(PipenvException): def __init__(self, message): - extra = [decode_for_output("{0} {1}".format( - crayons.red("ERROR:", bold=True), - crayons.white("A dependency conflict was detected and could not be resolved.", bold=True), - )),] - super(DependencyConflict, self).__init__(decode_for_output(message), extra=extra) + extra = ["{0} {1}".format( + crayons.red("The operation failed...", bold=True), + crayons.red("A dependency conflict was detected and could not be resolved."), + )] + PipenvException.__init__(self, message, extra=extra) class ResolutionFailure(PipenvException): @@ -355,9 +359,7 @@ def __init__(self, message, no_version_found=False): ) if "no version found at all" in message: no_version_found = True - message = "{0} {1}".format( - crayons.red("ERROR:", bold=True), crayons.yellow(message) - ) + message = crayons.yellow("{0}".format(message)) if no_version_found: message = "{0}\n{1}".format( message, crayons.blue( "Please check your version specifier and version number. " "See PEP440 for more information."
) ) - super(ResolutionFailure, self).__init__(decode_for_output(message), extra=extra) + PipenvException.__init__(self, message, extra=extra) class RequirementError(PipenvException): @@ -404,9 +406,8 @@ def __init__(self, req=None): crayons.normal(decode_for_output("Failed creating requirement instance")), crayons.white(decode_for_output("{0!r}".format(req_value))) ) - extra = [crayons.normal(decode_for_output(str(req)))] - super(RequirementError, self).__init__(message, extra=extra) - super(ResolutionFailure, self).__init__(fix_utf8(message), extra=extra) + extra = [str(req)] + PipenvException.__init__(self, message, extra=extra) def prettify_exc(error): diff --git a/pipenv/resolver.py b/pipenv/resolver.py index fa76765ee3..497d2343c5 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -778,14 +778,14 @@ def main(): warnings.simplefilter("ignore", category=ResourceWarning) replace_with_text_stream("stdout") replace_with_text_stream("stderr") - from pipenv.vendor import colorama - if os.name == "nt" and ( - all(getattr(stream, method, None) for stream in [sys.stdout, sys.stderr] for method in ["write", "isatty"]) and - all(stream.isatty() for stream in [sys.stdout, sys.stderr]) - ): - colorama.init(wrap=False) - elif os.name != "nt": - colorama.init() + # from pipenv.vendor import colorama + # if os.name == "nt" and ( + # all(getattr(stream, method, None) for stream in [sys.stdout, sys.stderr] for method in ["write", "isatty"]) and + # all(stream.isatty() for stream in [sys.stdout, sys.stderr]) + # ): + # colorama.init(wrap=False) + # elif os.name != "nt": + # colorama.init() os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = str("1") os.environ["PYTHONIOENCODING"] = str("utf-8") os.environ["PYTHONUNBUFFERED"] = str("1") diff --git a/pipenv/utils.py b/pipenv/utils.py index 6174d015a8..9a39a3e238 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -579,16 +579,16 @@ def pip_options(self): pip_options, _ = self.pip_command.parser.parse_args(self.pip_args) pip_options.cache_dir = environments.PIPENV_CACHE_DIR self._pip_options = pip_options - if environments.is_verbose(): - click_echo( - crayons.blue("Using pip: {0}".format(" ".join(self.pip_args))), err=True - ) return self._pip_options @property def session(self): if self._session is None: self._session = self.pip_command._build_session(self.pip_options) + if environments.is_verbose(): + click_echo( + crayons.blue("Using pip: {0}".format(" ".join(self.pip_args))), err=True + ) return self._session @property @@ -938,6 +938,7 @@ def resolve(cmd, sp): from .cmdparse import Script from .vendor.pexpect.exceptions import EOF, TIMEOUT from .vendor.vistir.compat import to_native_string + from .vendor.vistir.misc import echo EOF.__module__ = "pexpect.exceptions" from ._compat import decode_output c = delegator.run(Script.parse(cmd).cmdify(), block=False, env=os.environ.copy()) @@ -954,25 +955,23 @@ def resolve(cmd, sp): pass _out = c.subprocess.before if _out: - _out = decode_output("{0}".format(_out)) + _out = decode_output("{0}\n".format(_out)) out += _out sp.text = to_native_string("{0}".format(_out[:100])) - if environments.is_verbose(): - sp.hide_and_write(_out.rstrip()) # if environments.is_verbose(): # sp.hide_and_write(_out.rstrip()) + _out = to_native_string("") if not result and not _out: break - _out = to_native_string("") c.block() if c.return_code != 0: sp.red.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format( "Locking Failed!" 
)) - click_echo(c.out.strip(), err=True) + echo(c.out.strip(), err=True) if not environments.is_verbose(): - click_echo(out, err=True) - click_echo(c.err.strip(), err=True) + echo(out, err=True) + echo(c.err.strip(), err=True) sys.exit(c.return_code) return c diff --git a/setup.py b/setup.py index 34c43a9768..235b5a109d 100644 --- a/setup.py +++ b/setup.py @@ -22,13 +22,26 @@ sys.exit() required = [ - "pip>=9.0.1", + "pip>=18.0", "certifi", "setuptools>=41.0.0", "virtualenv-clone>=0.2.5", "virtualenv", 'enum34; python_version<"3"' ] +extras = { + "dev": [ + "towncrier", + "bs4", + "twine", + "sphinx<2", + "flake8>=3.3.0,<4.0", + "black;python_version>='3.6'", + "parver", + "invoke", + ], + "tests": ["pytest", "pytest-tap", "pytest-xdist", "flaky", "mock"], +} # https://pypi.python.org/pypi/stdeb/0.8.5#quickstart-2-just-tell-me-the-fastest-way-to-make-a-deb class DebCommand(Command): @@ -128,12 +141,9 @@ def run(self): ], }, python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", - setup_requires=["invoke", "parver", ], + setup_requires=["invoke", "parver"], install_requires=required, - extras_require={ - "test": ["pytest", "pytest-tap", "pytest-xdist", "flaky", "mock"], - "dev": ["towncrier", "bs4"], - }, + extras_require=extras, include_package_data=True, license="MIT", classifiers=[ From 8960250362186b74d0a85b7419f002329bbfb7e6 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 30 Apr 2019 18:13:08 -0400 Subject: [PATCH 37/81] Fix pep517 usage on failure Signed-off-by: Dan Ryan --- pipenv/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/core.py b/pipenv/core.py index 76ce7e8b47..d3e2ed68cc 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -746,7 +746,7 @@ def batch_install(deps_list, procs, failed_deps_queue, pypi_mirror=pypi_mirror, trusted_hosts=trusted_hosts, extra_indexes=extra_indexes, - use_pep517=not retry, + use_pep517=not failed, ) if procs.qsize() < nprocs: c.dep = dep From 411858776f2c0b5c5bce9ea243da2d89ada14cf6 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 30 Apr 2019 19:04:20 -0400 Subject: [PATCH 38/81] Fix pip version parsing Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/environment.py b/pipenv/environment.py index 4744c32db2..c09b1d7734 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -258,7 +258,7 @@ def pip_version(self): pkg for pkg in self.get_installed_packages() if pkg.key == "pip" ), None) if pip is not None: - pip_version = parse_version(pip.version) + return parse_version(pip.version) return parse_version("18.0") def get_distributions(self): From 4664925a6491151d28619e144e82d659dca48a87 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 30 Apr 2019 19:35:32 -0400 Subject: [PATCH 39/81] Update lockfile Signed-off-by: Dan Ryan --- Pipfile | 2 +- Pipfile.lock | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Pipfile b/Pipfile index 399a089778..826df207d7 100644 --- a/Pipfile +++ b/Pipfile @@ -7,7 +7,7 @@ stdeb = {version="*", markers="sys_platform == 'linux'"} jedi = "*" isort = "*" rope = "*" -passa = {editable = true, git = "https://github.com/sarugaku/passa.git"} +passa = {git = "https://github.com/sarugaku/passa.git"} [packages] diff --git a/Pipfile.lock b/Pipfile.lock index 5dc9d19b69..cd37bae1c4 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "ed7727922609f3e42737051e7058e8aa7dc64325eee72046e44a6a14db900b02" + "sha256": 
"f4d89c0aab5c4e865f8c96ba24613fb1e66bae803a3ceaeadb6abf0061898091" }, "pipfile-spec": 6, "requires": {}, @@ -415,9 +415,9 @@ "version": "==0.2.1" }, "passa": { - "editable": true, "git": "https://github.com/sarugaku/passa.git", - "ref": "a2ba0b30c86339cae5ef3a03046fc9c583452c40" + "ref": "a2ba0b30c86339cae5ef3a03046fc9c583452c40", + "version": "==0.3.1.dev0" }, "pathlib2": { "hashes": [ From 4336db9e16578c3cbb530da99e66e52810ea5bc3 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 1 May 2019 01:49:11 -0400 Subject: [PATCH 40/81] Fix ``--no-use-pep517`` fallback and environment Signed-off-by: Dan Ryan --- pipenv/core.py | 22 ++++++++------ pipenv/environment.py | 71 +++++++++++++++++++++++++++++++++++-------- 2 files changed, 72 insertions(+), 21 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index d3e2ed68cc..6a4ccc7a6e 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1416,15 +1416,18 @@ def pip_install( name = requirement.name if requirement.extras: name = "{0}{1}".format(name, requirement.extras_as_pip) - line = "-e {0}#egg={1}".format(vistir.path.path_to_url(repo.checkout_directory), requirement.name) + line = "{0}{1}#egg={2}".format( + line, vistir.path.path_to_url(repo.checkout_directory), requirement.name + ) if repo.subdirectory: line = "{0}&subdirectory={1}".format(line, repo.subdirectory) else: line = requirement.as_line(**line_kwargs) - click.echo( - "Writing requirement line to temporary file: {0!r}".format(line), - err=True - ) + if environments.is_verbose(): + click.echo( + "Writing requirement line to temporary file: {0!r}".format(line), + err=True + ) f.write(vistir.misc.to_bytes(line)) r = f.name f.close() @@ -1441,10 +1444,11 @@ def pip_install( ignore_hashes = True if not requirement.hashes else ignore_hashes line = requirement.as_line(include_hashes=not ignore_hashes) line = "{0} {1}".format(line, " ".join(src)) - click.echo( - "Writing requirement line to temporary file: {0!r}".format(line), - err=True - ) + if environments.is_verbose(): + click.echo( + "Writing requirement line to temporary file: {0!r}".format(line), + err=True + ) f.write(vistir.misc.to_bytes(line)) r = f.name f.close() diff --git a/pipenv/environment.py b/pipenv/environment.py index c09b1d7734..425e235051 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -9,7 +9,7 @@ import sys from distutils.sysconfig import get_python_lib -from sysconfig import get_paths +from sysconfig import get_paths, get_python_version import itertools import pkg_resources @@ -106,10 +106,14 @@ def add_dist(self, dist_name): @cached_property def python_version(self): with self.activated(): - from sysconfig import get_python_version - py_version = get_python_version() + sysconfig = self.safe_import("sysconfig") + py_version = sysconfig.get_python_version() return py_version + def find_libdir(self): + libdir = self.prefix / "lib" + return next(iter(list(libdir.iterdir())), None) + @property def python_info(self): include_dir = self.prefix / "include" @@ -120,6 +124,16 @@ def python_info(self): return {"py_version_short": py_version_short, "abiflags": abiflags} return {} + def _replace_parent_version(self, path, replace_version): + if not os.path.exists(path): + base, leaf = os.path.split(path) + base, parent = os.path.split(base) + leaf = os.path.join(parent, leaf).replace( + replace_version, self.python_info["py_version_short"] + ) + return os.path.join(base, leaf) + return path + @cached_property def base_paths(self): """ @@ -156,18 +170,22 @@ def base_paths(self): 'base': prefix, 'platbase': 
prefix, }) + current_version = get_python_version() + for k in list(paths.keys()): + if not os.path.exists(paths[k]): + paths[k] = self._replace_parent_version(paths[k], current_version) + if not os.path.exists(paths["purelib"]) and not os.path.exists(paths["platlib"]): + paths = self.get_paths() paths["PATH"] = paths["scripts"] + os.pathsep + os.defpath if "prefix" not in paths: paths["prefix"] = prefix - purelib = make_posix(get_python_lib(plat_specific=0, prefix=prefix)) - platlib = make_posix(get_python_lib(plat_specific=1, prefix=prefix)) + purelib = paths["purelib"] = make_posix(paths["purelib"]) + platlib = paths["platlib"] = make_posix(paths["platlib"]) if purelib == platlib: lib_dirs = purelib else: lib_dirs = purelib + os.pathsep + platlib paths["libdir"] = purelib - paths["purelib"] = purelib - paths["platlib"] = platlib paths['PYTHONPATH'] = os.pathsep.join(["", ".", lib_dirs]) paths["libdirs"] = lib_dirs return paths @@ -175,13 +193,18 @@ def base_paths(self): @cached_property def script_basedir(self): """Path to the environment scripts dir""" - script_dir = self.base_paths["scripts"] - return script_dir + prefix = make_posix(self.prefix.as_posix()) + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) + return paths["scripts"] @property def python(self): """Path to the environment python""" - py = vistir.compat.Path(self.base_paths["scripts"]).joinpath("python").absolute().as_posix() + py = vistir.compat.Path(self.script_basedir).joinpath("python").absolute().as_posix() if not py: return vistir.compat.Path(sys.executable).as_posix() return py @@ -209,6 +232,30 @@ def sys_path(self): path = sys.path return path + def get_paths(self): + """ + Get the paths for the environment by running a subcommand + + :return: The python paths for the environment + :rtype: Dict[str, str] + """ + prefix = make_posix(self.prefix.as_posix()) + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + py_command = ( + "import sysconfig, json, distutils.sysconfig;" + "paths = sysconfig.get_paths('{0}', vars={{'base': '{1}', 'platbase': '{1}'}}" + ");paths['purelib'] = distutils.sysconfig.get_python_lib(plat_specific=0, " + "prefix='{1}');paths['platlib'] = distutils.sysconfig.get_python_lib(" + "plat_specific=1, prefix='{1}');print(json.dumps(paths))" + ) + vistir.misc.echo("command: {0}".format(py_command.format(install_scheme, prefix)), fg="white", style="bold", err=True) + command = [self.python, "-c", py_command.format(install_scheme, prefix)] + c = vistir.misc.run( + command, return_object=True, block=True, nospin=True, write_to_stdout=False + ) + paths = json.loads(vistir.misc.to_text(c.out.strip())) + return paths + @cached_property def sys_prefix(self): """ @@ -218,7 +265,7 @@ def sys_prefix(self): :rtype: :data:`sys.prefix` """ - command = [self.python, "-c" "import sys; print(sys.prefix)"] + command = [self.python, "-c", "import sys; print(sys.prefix)"] c = vistir.misc.run(command, return_object=True, block=True, nospin=True, write_to_stdout=False) sys_prefix = vistir.compat.Path(vistir.misc.to_text(c.out).strip()).as_posix() return sys_prefix @@ -568,7 +615,7 @@ def activated(self, include_extras=True, extra_dists=None): prefix = self.prefix.as_posix() with vistir.contextmanagers.temp_environ(), vistir.contextmanagers.temp_path(): os.environ["PATH"] = os.pathsep.join([ - vistir.compat.fs_str(self.scripts_dir), + vistir.compat.fs_str(self.script_basedir), 
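The new ``get_paths()`` above stops guessing an environment's layout from the outside: it executes the target environment's own interpreter and has it report its sysconfig/distutils paths as JSON. The core of that trick, minus pipenv's install-scheme and prefix handling:

    import json
    import subprocess

    def env_paths(python):
        # Hypothetical reduction of Environment.get_paths(): ask the target
        # interpreter itself, so virtualenv vs. system layout quirks don't
        # matter to the caller.
        code = "import json, sysconfig; print(json.dumps(sysconfig.get_paths()))"
        out = subprocess.check_output([python, "-c", code])
        return json.loads(out.decode("utf-8"))

    # e.g. env_paths("/path/to/venv/bin/python")["purelib"]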
vistir.compat.fs_str(self.prefix.as_posix()), os.environ.get("PATH", "") ]) From 01b0496c4b22ef77a8ae4b960a315b342e050729 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 6 May 2019 15:40:44 -0400 Subject: [PATCH 41/81] Update path Signed-off-by: Dan Ryan --- Pipfile | 3 +++ Pipfile.lock | 18 ++++++++++-------- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/Pipfile b/Pipfile index 826df207d7..5b4d4c37f6 100644 --- a/Pipfile +++ b/Pipfile @@ -16,3 +16,6 @@ tests = "bash ./run-tests.sh" [pipenv] allow_prereleases = true + +[requires] +python_version = "2.7" diff --git a/Pipfile.lock b/Pipfile.lock index cd37bae1c4..f56081736a 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,10 +1,12 @@ { "_meta": { "hash": { - "sha256": "f4d89c0aab5c4e865f8c96ba24613fb1e66bae803a3ceaeadb6abf0061898091" + "sha256": "8c641b76e29b8bfedd276146b52303ddc84192c71ff9a655839dcf17f559332e" }, "pipfile-spec": 6, - "requires": {}, + "requires": { + "python_version": "2.7" + }, "sources": [ { "name": "pypi", @@ -298,11 +300,11 @@ }, "isort": { "hashes": [ - "sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43", - "sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a" + "sha256:1349c6f7c2a0f7539f5f2ace51a9a8e4a37086ce4de6f78f5f53fb041d0a3cd5", + "sha256:f09911f6eb114e5592abe635aded8bf3d2c3144ebcfcaf81ee32e7af7b7d1870" ], "index": "pypi", - "version": "==4.3.17" + "version": "==4.3.18" }, "itsdangerous": { "hashes": [ @@ -664,11 +666,11 @@ }, "sphinx-click": { "hashes": [ - "sha256:926da1a7c677ae1b35cf255269ff84fec65d0f92e4863acfa77b92cf8ae32275", - "sha256:f0c03d6ea0e4258c9c09646b6f745090ea8dd13e7e045903e4b789dfc02f7846" + "sha256:9ceb0ee39e9734f39f593f99499e2a2f8e62929ac0b5f040b1d0411c02efaab7", + "sha256:c9c45127bbbf720f1d24476cb3e57c806dd270662ae63b53a4b23be6c334491e" ], "index": "pypi", - "version": "==2.0.1" + "version": "==2.1.0" }, "sphinxcontrib-websupport": { "hashes": [ From 5310fbd77f9cf8944d3ab248d89b61bf42188692 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 6 May 2019 15:55:24 -0400 Subject: [PATCH 42/81] update python version in pipfile Signed-off-by: Dan Ryan --- Pipfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Pipfile b/Pipfile index 5b4d4c37f6..a5c1fee0f3 100644 --- a/Pipfile +++ b/Pipfile @@ -17,5 +17,3 @@ tests = "bash ./run-tests.sh" [pipenv] allow_prereleases = true -[requires] -python_version = "2.7" From 06feddeee99767fd65ee90aa34db168286e0fc9c Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 6 May 2019 16:38:25 -0400 Subject: [PATCH 43/81] Update lockfile Signed-off-by: Dan Ryan --- Pipfile.lock | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index f56081736a..0ae560f206 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,12 +1,10 @@ { "_meta": { "hash": { - "sha256": "8c641b76e29b8bfedd276146b52303ddc84192c71ff9a655839dcf17f559332e" + "sha256": "f4d89c0aab5c4e865f8c96ba24613fb1e66bae803a3ceaeadb6abf0061898091" }, "pipfile-spec": 6, - "requires": { - "python_version": "2.7" - }, + "requires": {}, "sources": [ { "name": "pypi", From 0b600ae20c6721763c1ae4b8db6bb2fe450c39b9 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Tue, 7 May 2019 17:01:33 -0400 Subject: [PATCH 44/81] Fix requirementslib marker / installers Signed-off-by: Dan Ryan --- pipenv/environment.py | 2 +- pipenv/utils.py | 10 ++++--- .../vendor/requirementslib/models/markers.py | 27 ++++++++++++------- 3 files changed, 24 insertions(+), 15 deletions(-) diff --git a/pipenv/environment.py b/pipenv/environment.py 
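The hash churn across patches 41-43 is mechanical: the ``_meta.hash`` in Pipfile.lock is a sha256 digest of the parsed Pipfile data, so adding the ``[requires]`` table produces a new digest and deleting it again restores the old one. Conceptually (the exact serialization pipenv hashes is an assumption here, simplified away):

    import hashlib
    import json

    def pipfile_hash(pipfile_data):
        # Simplified: pipenv hashes a canonical dump of the parsed Pipfile
        # sections, not necessarily this exact JSON form.
        dump = json.dumps(pipfile_data, sort_keys=True, separators=(",", ":"))
        return hashlib.sha256(dump.encode("utf-8")).hexdigest()

    assert pipfile_hash({"requires": {"python_version": "2.7"}}) != pipfile_hash({"requires": {}})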
index 425e235051..49cad91dfe 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -129,7 +129,7 @@ def _replace_parent_version(self, path, replace_version): base, leaf = os.path.split(path) base, parent = os.path.split(base) leaf = os.path.join(parent, leaf).replace( - replace_version, self.python_info["py_version_short"] + replace_version, self.python_info.get("py_version_short", get_python_version()) ) return os.path.join(base, leaf) return path diff --git a/pipenv/utils.py b/pipenv/utils.py index 9a39a3e238..b4d7817bfb 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -585,10 +585,10 @@ def pip_options(self): def session(self): if self._session is None: self._session = self.pip_command._build_session(self.pip_options) - if environments.is_verbose(): - click_echo( - crayons.blue("Using pip: {0}".format(" ".join(self.pip_args))), err=True - ) + # if environments.is_verbose(): + # click_echo( + # crayons.blue("Using pip: {0}".format(" ".join(self.pip_args))), err=True + # ) return self._session @property @@ -673,6 +673,7 @@ def resolve_constraints(self): else: candidate = self.fetch_candidate(result) if getattr(candidate, "requires_python", None): + print(candidate.requires_python) marker = make_marker_from_specifier(candidate.requires_python) self.markers[result.name] = marker result.markers = marker @@ -2061,6 +2062,7 @@ def make_marker_from_specifier(spec): spec = "=={0}".format(spec.lstrip("=")) specset = cleanup_pyspecs(SpecifierSet(spec)) marker_str = " and ".join([format_pyversion(pv) for pv in specset]) + print(marker_str, file=sys.stderr) return Marker(marker_str) # spec_match = next(iter(c for c in Specifier._operators if c in spec), None) # if spec_match: diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index e1014917df..b5f55b34ee 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -149,6 +149,7 @@ def _format_pyspec(specifier): try: op = REPLACE_RANGES[specifier.operator] except KeyError: + print(specifier) return specifier curr_tuple = _tuplize_version(version) try: @@ -160,8 +161,10 @@ def _format_pyspec(specifier): op = "<=" next_tuple = (next_tuple[0], curr_tuple[1]) else: + # print(specifier) return specifier specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple))) + # print(specifier) return specifier @@ -178,16 +181,20 @@ def _get_specs(specset): if isinstance(specset, str): specset = SpecifierSet(specset) result = [] - for spec in set(specset): - version = spec.version - op = spec.operator - if op in ("in", "not in"): - versions = version.split(",") - op = "==" if op == "in" else "!=" - for ver in versions: - result.append((op, _tuplize_version(ver.strip()))) - else: - result.append((spec.operator, _tuplize_version(spec.version))) + try: + for spec in set(specset): + version = spec.version + op = spec.operator + if op in ("in", "not in"): + versions = version.split(",") + op = "==" if op == "in" else "!=" + for ver in versions: + result.append((op, _tuplize_version(ver.strip()))) + else: + result.append((spec.operator, _tuplize_version(spec.version))) + except Exception: + print(specset) + raise return sorted(result, key=operator.itemgetter(1)) From b7de7c1cdfcc459bc9814fd2c335320441967e68 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Fri, 10 May 2019 23:14:51 -0400 Subject: [PATCH 45/81] Fix marker formatting and inclusion Signed-off-by: Dan Ryan --- pipenv/resolver.py | 5 +- pipenv/utils.py | 63 
++++++++++++------- .../vendor/requirementslib/models/markers.py | 27 +++----- tests/integration/test_install_markers.py | 6 +- tests/integration/test_install_twists.py | 6 +- 5 files changed, 59 insertions(+), 48 deletions(-) diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 497d2343c5..ca42b44cbc 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -238,6 +238,7 @@ def get_cleaned_dict(self, keep_outdated=False): self.entry_dict["hashes"] = list(entry_hashes | locked_hashes) self.entry_dict["name"] = self.name self.entry_dict["version"] = self.strip_version(self.entry_dict["version"]) + _, self.entry_dict = self.get_markers_from_dict(self.entry_dict) return self.entry_dict @property @@ -603,6 +604,7 @@ def __getattribute__(self, key): def clean_results(results, resolver, project, dev=False): + from pipenv.utils import translate_markers if not project.lockfile_exists: return results lockfile = project.lockfile_content @@ -614,7 +616,7 @@ def clean_results(results, resolver, project, dev=False): name = result.get("name") entry_dict = result.copy() entry = Entry(name, entry_dict, project, resolver, reverse_deps=reverse_deps, dev=dev) - entry_dict = entry.get_cleaned_dict(keep_outdated=False) + entry_dict = translate_markers(entry.get_cleaned_dict(keep_outdated=False)) new_results.append(entry_dict) return new_results @@ -681,7 +683,6 @@ def parse_packages(packages, pre, clear, system, requirements_dir=None): sys.path.insert(0, req.req.setup_info.base_dir) req.req._setup_info.get_info() req.update_name_from_path(req.req.setup_info.base_dir) - print(os.listdir(req.req.setup_info.base_dir)) try: name, entry = req.pipfile_entry except Exception: diff --git a/pipenv/utils.py b/pipenv/utils.py index b4d7817bfb..d7aa4c5605 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from __future__ import print_function import contextlib import errno import logging @@ -353,7 +354,10 @@ def get_metadata( index_lookup, # type: Dict[str, str] markers_lookup, # type: Dict[str, str] project, # type: Project - sources # type: Dict[str, str] + sources, # type: Dict[str, str] + req_dir=None, # type: Optional[str] + pre=False, # type: bool + clear=False, # type: bool ): # type: (...) 
-> Tuple[Set[str], Dict[str, Dict[str, Union[str, bool, List[str]]]], Dict[str, str], Dict[str, str]] constraints = set() # type: Set[str] @@ -362,6 +366,13 @@ def get_metadata( index_lookup = {} if markers_lookup is None: markers_lookup = {} + if not req_dir: + from .vendor.vistir.path import create_tracked_tempdir + req_dir = create_tracked_tempdir(prefix="pipenv-", suffix="-reqdir") + transient_resolver = cls( + [], req_dir, project, sources, index_lookup=index_lookup, + markers_lookup=markers_lookup, clear=clear, pre=pre + ) for dep in deps: if not dep: continue @@ -370,7 +381,9 @@ def get_metadata( ) index_lookup.update(req_idx) markers_lookup.update(markers_idx) - constraint_update, lockfile_update = cls.get_deps_from_req(req) + constraint_update, lockfile_update = cls.get_deps_from_req( + req, resolver=transient_resolver + ) constraints |= constraint_update skipped.update(lockfile_update) return constraints, skipped, index_lookup, markers_lookup @@ -427,12 +440,13 @@ def get_deps_from_line(cls, line): return cls.get_deps_from_req(req) @classmethod - def get_deps_from_req(cls, req): - # type: (Requirement) -> Tuple[Set[str], Dict[str, Dict[str, Union[str, bool, List[str]]]]] - from requirementslib.models.utils import _requirement_to_str_lowercase_name + def get_deps_from_req(cls, req, resolver=None): + # type: (Requirement, Optional["Resolver"]) -> Tuple[Set[str], Dict[str, Dict[str, Union[str, bool, List[str]]]]] + from .vendor.requirementslib.models.utils import _requirement_to_str_lowercase_name + from .vendor.requirementslib.models.requirements import Requirement constraints = set() # type: Set[str] locked_deps = dict() # type: Dict[str, Dict[str, Union[str, bool, List[str]]]] - if req.is_file_or_url or req.is_vcs and not req.is_wheel: + if (req.is_file_or_url or req.is_vcs) and not req.is_wheel: # for local packages with setup.py files and potential direct url deps: if req.is_vcs: req_list, lockfile = get_vcs_deps(reqs=[req]) @@ -464,7 +478,9 @@ def get_deps_from_req(cls, req): pep423_name(new_req.normalized_name): new_entry } else: - new_constraints, new_lock = cls.get_deps_from_req(new_req) + new_constraints, new_lock = cls.get_deps_from_req( + new_req, resolver + ) locked_deps.update(new_lock) constraints |= new_constraints # if there is no marker or there is a valid marker, add the constraint line @@ -493,6 +509,14 @@ def get_deps_from_req(cls, req): if req and req.requirement and ( req.requirement.marker and not req.requirement.marker.evaluate() ): + pypi = resolver.repository if resolver else None + best_match = pypi.find_best_match(req.ireq) if pypi else None + if best_match: + hashes = resolver.collect_hashes(best_match) if resolver else [] + new_req = Requirement.from_ireq(best_match) + new_req = new_req.add_hashes(hashes) + name, entry = new_req.pipfile_entry + locked_deps[pep423_name(name)] = translate_markers(entry) return constraints, locked_deps constraints.add(req.constraint_line) return constraints, locked_deps @@ -524,7 +548,8 @@ def create( if sources is None: sources = project.sources constraints, skipped, index_lookup, markers_lookup = cls.get_metadata( - deps, index_lookup, markers_lookup, project, sources, + deps, index_lookup, markers_lookup, project, sources, req_dir=req_dir, + pre=pre, clear=clear ) return Resolver( constraints, req_dir, project, sources, index_lookup=index_lookup, @@ -673,7 +698,6 @@ def resolve_constraints(self): else: candidate = self.fetch_candidate(result) if getattr(candidate, "requires_python", None): - 
print(candidate.requires_python) marker = make_marker_from_specifier(candidate.requires_python) self.markers[result.name] = marker result.markers = marker @@ -837,6 +861,7 @@ def clean_results(self): name, entry = format_requirement_for_lockfile( req, self.markers_lookup, self.index_lookup, collected_hashes ) + entry = translate_markers(entry) if name in results: results[name].update(entry) else: @@ -844,6 +869,7 @@ def clean_results(self): for k in list(self.skipped.keys()): req = Requirement.from_pipfile(k, self.skipped[k]) name, entry = self._clean_skipped_result(req, self.skipped[k]) + entry = translate_markers(entry) if name in results: results[name].update(entry) else: @@ -1026,6 +1052,7 @@ def prepare_lockfile(results, pipfile, lockfile): lockfile[name] = lockfile_entry[name] else: lockfile[name].update(lockfile_entry[name]) + lockfile[name] = translate_markers(lockfile[name]) else: lockfile[name] = lockfile_entry[name] return lockfile @@ -1708,13 +1735,12 @@ def translate_markers(pipfile_entry): """ if not isinstance(pipfile_entry, Mapping): raise TypeError("Entry is not a pipfile formatted mapping.") - from .vendor.distlib.markers import DEFAULT_CONTEXT as marker_context - from .vendor.packaging.markers import Marker + from .vendor.packaging.markers import Marker, default_environment from .vendor.vistir.misc import dedup - allowed_marker_keys = ["markers"] + [k for k in marker_context.keys()] + allowed_marker_keys = ["markers"] + list(default_environment().keys()) provided_keys = list(pipfile_entry.keys()) if hasattr(pipfile_entry, "keys") else [] - pipfile_markers = [k for k in provided_keys if k in allowed_marker_keys] + pipfile_markers = set(provided_keys) & set(allowed_marker_keys) new_pipfile = dict(pipfile_entry).copy() marker_set = set() if "markers" in new_pipfile: @@ -1724,7 +1750,7 @@ def translate_markers(pipfile_entry): for m in pipfile_markers: entry = "{0}".format(pipfile_entry[m]) if m != "markers": - marker_set.add(str(Marker("{0}{1}".format(m, entry)))) + marker_set.add(str(Marker("{0} {1}".format(m, entry)))) new_pipfile.pop(m) if marker_set: new_pipfile["markers"] = str(Marker(" or ".join( @@ -2062,13 +2088,4 @@ def make_marker_from_specifier(spec): spec = "=={0}".format(spec.lstrip("=")) specset = cleanup_pyspecs(SpecifierSet(spec)) marker_str = " and ".join([format_pyversion(pv) for pv in specset]) - print(marker_str, file=sys.stderr) return Marker(marker_str) - # spec_match = next(iter(c for c in Specifier._operators if c in spec), None) - # if spec_match: - # spec_index = spec.index(spec_match) - # spec_end = spec_index + len(spec_match) - # op = spec[spec_index:spec_end].strip() - # version = spec[spec_end:].strip() - # spec = " {0} '{1}'".format(op, version) - # return Marker("python_version {0}".format(spec)) diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py index b5f55b34ee..e1014917df 100644 --- a/pipenv/vendor/requirementslib/models/markers.py +++ b/pipenv/vendor/requirementslib/models/markers.py @@ -149,7 +149,6 @@ def _format_pyspec(specifier): try: op = REPLACE_RANGES[specifier.operator] except KeyError: - print(specifier) return specifier curr_tuple = _tuplize_version(version) try: @@ -161,10 +160,8 @@ def _format_pyspec(specifier): op = "<=" next_tuple = (next_tuple[0], curr_tuple[1]) else: - # print(specifier) return specifier specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple))) - # print(specifier) return specifier @@ -181,20 +178,16 @@ def _get_specs(specset): if 
isinstance(specset, str): specset = SpecifierSet(specset) result = [] - try: - for spec in set(specset): - version = spec.version - op = spec.operator - if op in ("in", "not in"): - versions = version.split(",") - op = "==" if op == "in" else "!=" - for ver in versions: - result.append((op, _tuplize_version(ver.strip()))) - else: - result.append((spec.operator, _tuplize_version(spec.version))) - except Exception: - print(specset) - raise + for spec in set(specset): + version = spec.version + op = spec.operator + if op in ("in", "not in"): + versions = version.split(",") + op = "==" if op == "in" else "!=" + for ver in versions: + result.append((op, _tuplize_version(ver.strip()))) + else: + result.append((spec.operator, _tuplize_version(spec.version))) return sorted(result, key=operator.itemgetter(1)) diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 96f6edff38..1d3fb1bd13 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -25,7 +25,7 @@ def test_package_environment_markers(PipenvInstance, pypi): c = p.pipenv('install') assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['tablib'] + assert 'markers' in p.lockfile['default']['tablib'], p.lockfile["default"]["tablib"] c = p.pipenv('run python -c "import tablib;"') assert c.return_code == 1 @@ -95,8 +95,8 @@ def test_top_level_overrides_environment_markers(PipenvInstance, pypi): c = p.pipenv('install') assert c.return_code == 0 - - assert p.lockfile['default']['funcsigs']['markers'] == "os_name == 'splashwear'" + assert "markers" in p.lockfile['default']['funcsigs'], p.lockfile['default']['funcsigs'] + assert p.lockfile['default']['funcsigs']['markers'] == "os_name == 'splashwear'", p.lockfile['default']['funcsigs'] @pytest.mark.markers diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py index 98236b8afd..08b2ef1e31 100644 --- a/tests/integration/test_install_twists.py +++ b/tests/integration/test_install_twists.py @@ -1,5 +1,6 @@ import os import shutil +import sys import pytest @@ -51,10 +52,9 @@ def test_local_extras_install(PipenvInstance, pypi): assert "testpipenv" in p.lockfile["default"] assert p.lockfile["default"]["testpipenv"]["extras"] == ["dev"] assert "six" in p.lockfile["default"] - c = p.pipenv("--rm") + c = p.pipenv("uninstall --all") assert c.return_code == 0 - project = Project() - project.write_toml({"packages": {}, "dev-packages": {}}) + print("Current directory: {0}".format(os.getcwd()), file=sys.stderr) c = p.pipenv("install {0}".format(line)) assert c.return_code == 0 assert "testpipenv" in p.pipfile["packages"] From c6979fd698511e79b9c14fdeff751e9452689e0d Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 00:02:57 -0400 Subject: [PATCH 46/81] Update exception handlers Signed-off-by: Dan Ryan --- pipenv/core.py | 7 +++-- pipenv/exceptions.py | 64 ++++++++++++++++++++++++++++++++++---------- pipenv/utils.py | 8 +++--- 3 files changed, 58 insertions(+), 21 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index 6a4ccc7a6e..dd73c71191 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -14,11 +14,11 @@ import vistir import click_completion -import crayons import delegator import dotenv import pipfile +from .patched import crayons from . 
import environments, exceptions, pep508checker, progress from ._compat import fix_utf8, decode_for_output from .cmdparse import Script @@ -917,7 +917,6 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): pip_config = {} # Actually create the virtualenv. - nospin = environments.PIPENV_NOSPIN with create_spinner("Creating virtual environment...") as sp: c = vistir.misc.run( cmd, verbose=False, return_object=True, write_to_stdout=False, @@ -925,10 +924,10 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): ) click.echo(crayons.blue("{0}".format(c.out)), err=True) if c.returncode != 0: - sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format("Failed creating virtual environment")) + sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format(u"Failed creating virtual environment")) error = c.err if environments.is_verbose() else exceptions.prettify_exc(c.err) raise exceptions.VirtualenvCreationException( - extra=[crayons.red("{0}".format(error)),] + extra=crayons.red("{0}".format(error)) ) else: diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 23a1eb075f..ad7c192ead 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -1,8 +1,10 @@ # -*- coding=utf-8 -*- import itertools +import re import sys +from collections import namedtuple from pprint import pformat from traceback import format_exception, format_tb @@ -19,9 +21,20 @@ from .vendor.vistir.misc import echo as click_echo import vistir -KNOWN_EXCEPTIONS = { - "PermissionError": "Permission denied:", -} +ANSI_REMOVAL_RE = re.compile(r"\033\[((?:\d|;)*)([a-zA-Z])", re.MULTILINE) +STRING_TYPES = (six.string_types, crayons.ColoredString) +KnownException = namedtuple( + 'KnownException', ['exception_name', 'match_string', 'show_from_string', 'prefix'], + defaults=[None, None, None, ""] +) +KNOWN_EXCEPTIONS = [ + KnownException("PermissionError", prefix="Permission Denied:"), + KnownException( + "VirtualenvCreationException", + match_string="do_create_virtualenv", + show_from_string=None + ) +] def handle_exception(exc_type, exception, traceback, hook=sys.excepthook): @@ -31,6 +44,7 @@ def handle_exception(exc_type, exception, traceback, hook=sys.excepthook): exc = format_exception(exc_type, exception, traceback) tb = format_tb(traceback, limit=-6) lines = itertools.chain.from_iterable([frame.splitlines() for frame in tb]) + formatted_lines = [] for line in lines: line = line.strip("'").strip('"').strip("\n").strip() if not line.startswith("File"): @@ -40,7 +54,10 @@ def handle_exception(exc_type, exception, traceback, hook=sys.excepthook): line = "[{0!s}]: {1}".format( exception.__class__.__name__, line ) - click_echo(decode_for_output(line), err=True) + formatted_lines.append(line) + # use new exception prettification rules to format exceptions according to + # UX rules + click_echo(decode_for_output(prettify_exc("\n".join(formatted_lines))), err=True) exception.show() @@ -62,7 +79,7 @@ def show(self, file=None): if file is None: file = vistir.misc.get_text_stderr() if self.extra: - if isinstance(self.extra, six.string_types): + if isinstance(self.extra, STRING_TYPES): self.extra = [self.extra,] for extra in self.extra: extra = "[pipenv.exceptions.{0!s}]: {1}".format( @@ -141,7 +158,7 @@ def show(self, file=None): if self.ctx is not None: color = self.ctx.color if self.extra: - if isinstance(self.extra, six.string_types): + if isinstance(self.extra, STRING_TYPES): self.extra = [self.extra,] for extra in self.extra: if color: @@ -177,7 +194,7 @@ def show(self, file=None): if 
file is None: file = vistir.misc.get_text_stderr() if self.extra: - if isinstance(self.extra, six.string_types): + if isinstance(self.extra, STRING_TYPES): self.extra = [self.extra,] for extra in self.extra: click_echo(decode_for_output(extra, file), file=file) @@ -283,7 +300,16 @@ def __init__(self, message=None, **kwargs): if not message: message = "Failed to create virtual environment." self.message = message - VirtualenvException.__init__(self, message, **kwargs) + extra = kwargs.pop("extra", None) + if extra is not None and isinstance(extra, STRING_TYPES): + # note we need the format interpolation because ``crayons.ColoredString`` + # is not an actual string type but is only a preparation for interpolation + # so replacement or parsing requires this step + extra = ANSI_REMOVAL_RE.sub("", "{0}".format(extra)) + if "KeyboardInterrupt" in extra: + extra = crayons.red("Virtualenv creation interrupted by user", bold=True) + self.extra = extra = [extra,] + VirtualenvException.__init__(self, message, extra=extra) class UninstallError(PipenvException): @@ -413,12 +439,22 @@ def __init__(self, req=None): def prettify_exc(error): """Catch known errors and prettify them instead of showing the entire traceback, for better UX""" - matched_exceptions = [k for k in KNOWN_EXCEPTIONS.keys() if k in error] - if not matched_exceptions: - return "{}".format(vistir.misc.decode_for_output(error)) errors = [] - for match in matched_exceptions: - _, error, info = error.rpartition(KNOWN_EXCEPTIONS[match]) - errors.append("{} {}".format(error, info)) + for exc in KNOWN_EXCEPTIONS: + search_string = exc.match_string if exc.match_string else exc.exception_name + split_string = exc.show_from_string if exc.show_from_string else exc.exception_name + if search_string in error: + # for known exceptions with no display rules and no prefix + # we should simply show nothing + if not exc.show_from_string and not exc.prefix: + errors.append("") + continue + elif exc.prefix and exc.prefix in error: + _, error, info = error.rpartition(exc.prefix) + else: + _, error, info = error.rpartition(split_string) + errors.append("{0} {1}".format(error, info)) + if not errors: + return "{}".format(vistir.misc.decode_for_output(error)) return "\n".join(errors) diff --git a/pipenv/utils.py b/pipenv/utils.py index d7aa4c5605..e0167291a5 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -1744,9 +1744,11 @@ def translate_markers(pipfile_entry): new_pipfile = dict(pipfile_entry).copy() marker_set = set() if "markers" in new_pipfile: - marker = str(Marker(new_pipfile.pop("markers"))) - if 'extra' not in marker: - marker_set.add(marker) + marker_str = new_pipfile.pop("markers") + if marker_str is not None: + marker = str(Marker(marker_str)) + if 'extra' not in marker: + marker_set.add(marker) for m in pipfile_markers: entry = "{0}".format(pipfile_entry[m]) if m != "markers": From 102b68228483584d122f13620d49d31966d26897 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 21:27:51 -0400 Subject: [PATCH 47/81] Fix keep_outdated markers Signed-off-by: Dan Ryan --- tests/integration/test_lock.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 6a0f15f0e7..29cc0cc472 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -98,12 +98,12 @@ def test_lock_keep_outdated(PipenvInstance, pypi): def test_keep_outdated_doesnt_remove_lockfile_entries(PipenvInstance, pypi): with PipenvInstance(chdir=True, pypi=pypi) as p: 
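# An illustrative aside on the keep_outdated marker fixes here and in
# PATCH 48 below: packaging re-serializes a marker once parsed, so the
# spacing of the input string is not preserved verbatim. This is plain
# packaging behavior shown standalone; it is not pipenv's own code.
from packaging.markers import Marker

loose = Marker("os_name=='FakeOS'")
spaced = Marker("os_name == 'FakeOS'")

# Both inputs parse to the same marker and serialize identically...
assert str(loose) == str(spaced)
# ...and neither matches a real platform, so such a package is skipped.
assert loose.evaluate() is False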
p._pipfile.add("requests", "==2.18.4") - p._pipfile.add("colorama", {"version": "*", "markers": "os_name='FakeOS'"}) + p._pipfile.add("colorama", {"version": "*", "markers": "os_name=='FakeOS'"}) p.pipenv("install") p._pipfile.add("six", "*") p.pipenv("lock --keep-outdated") assert "colorama" in p.lockfile["default"] - assert p.lockfile["default"]["colorama"]["markers"] == "os_name='FakeOS'" + assert p.lockfile["default"]["colorama"]["markers"] == "os_name=='FakeOS'" @pytest.mark.lock From 210495992b6ea974895bdb65c213336683eb01b0 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 21:36:06 -0400 Subject: [PATCH 48/81] Fix marker formatting in keep outdated test Signed-off-by: Dan Ryan --- tests/integration/test_lock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 29cc0cc472..b783000bb9 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -103,7 +103,7 @@ def test_keep_outdated_doesnt_remove_lockfile_entries(PipenvInstance, pypi): p._pipfile.add("six", "*") p.pipenv("lock --keep-outdated") assert "colorama" in p.lockfile["default"] - assert p.lockfile["default"]["colorama"]["markers"] == "os_name=='FakeOS'" + assert p.lockfile["default"]["colorama"]["markers"] == "os_name == 'FakeOS'" @pytest.mark.lock From 8e1e82504168d5ceaeb67cae8fa3e8f321ebc329 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 22:02:58 -0400 Subject: [PATCH 49/81] Update vendored pip licenses Signed-off-by: Dan Ryan --- Pipfile.lock | 8 ++--- .../notpip/_vendor/colorama/LICENSE.txt | 1 - .../patched/notpip/_vendor/requests/LICENSE | 2 +- pipenv/patched/notpip/_vendor/six.LICENSE | 2 +- .../notpip/_vendor/urllib3/LICENSE.txt | 32 +++++++++-------- pipenv/patched/notpip/_vendor/vendor.txt | 36 +++++++++---------- pipenv/vendor/vendor_pip.txt | 36 +++++++++---------- tasks/vendoring/__init__.py | 12 +++++++ 8 files changed, 71 insertions(+), 58 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 0ae560f206..53c174cfb3 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -519,7 +519,7 @@ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a", "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.0" }, "pytest": { @@ -581,7 +581,7 @@ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" ], - "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.21.0" }, "requests-toolbelt": { @@ -637,7 +637,7 @@ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.12.0" }, "snowballstemmer": { @@ -721,7 +721,7 @@ "sha256:0a860bf2683fdbb4812fe539a6c22ea3f1777843ea985cb8c3807db448a0f7ab", "sha256:e288416eecd4df19d12407d0c913cbf77aa8009d7fddb18f632aded3bdbdda6b" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1'", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": 
"==4.32.1" }, "twine": { diff --git a/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt b/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt index 5f567799f3..3105888ec1 100644 --- a/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt +++ b/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt @@ -25,4 +25,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/pipenv/patched/notpip/_vendor/requests/LICENSE b/pipenv/patched/notpip/_vendor/requests/LICENSE index 2e68b82ecb..841c6023b9 100644 --- a/pipenv/patched/notpip/_vendor/requests/LICENSE +++ b/pipenv/patched/notpip/_vendor/requests/LICENSE @@ -4,7 +4,7 @@ Copyright 2018 Kenneth Reitz you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/pipenv/patched/notpip/_vendor/six.LICENSE b/pipenv/patched/notpip/_vendor/six.LICENSE index f3068bfd9e..365d10741b 100644 --- a/pipenv/patched/notpip/_vendor/six.LICENSE +++ b/pipenv/patched/notpip/_vendor/six.LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2010-2017 Benjamin Peterson +Copyright (c) 2010-2018 Benjamin Peterson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt b/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt index 1c3283ee5b..c89cf27b85 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt +++ b/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt @@ -1,19 +1,21 @@ -This is the MIT license: http://www.opensource.org/licenses/mit-license.php +MIT License -Copyright 2008-2016 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -Permission is hereby granted, free of charge, to any person obtaining a copy of this -software and associated documentation files (the "Software"), to deal in the Software -without restriction, including without limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons -to whom the Software is furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all copies or -substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR -PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE -FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/vendor.txt b/pipenv/patched/notpip/_vendor/vendor.txt index 9389dd947d..7b5482550b 100644 --- a/pipenv/patched/notpip/_vendor/vendor.txt +++ b/pipenv/patched/notpip/_vendor/vendor.txt @@ -1,23 +1,23 @@ appdirs==1.4.3 -distlib==0.2.7 -distro==1.3.0 -html5lib==1.0.1 -six==1.11.0 -colorama==0.3.9 CacheControl==0.12.5 -msgpack-python==0.5.6 -lockfile==0.12.2 -progress==1.4 +colorama==0.4.1 +distlib==0.2.8 +distro==1.4.0 +html5lib==1.0.1 ipaddress==1.0.22 # Only needed on 2.6 and 2.7 -packaging==18.0 -pep517==0.2 -pyparsing==2.2.1 -pytoml==0.1.19 -retrying==1.3.3 -requests==2.19.1 +lockfile==0.12.2 +msgpack==0.5.6 +packaging==19.0 +pep517==0.5.0 +progress==1.5 +pyparsing==2.4.0 +pytoml==0.1.20 +requests==2.21.0 + certifi==2019.3.9 chardet==3.0.4 - idna==2.7 - urllib3==1.23 - certifi==2018.8.24 -setuptools==40.4.3 + idna==2.8 + urllib3==1.25.2 +retrying==1.3.3 +setuptools==41.0.1 +six==1.12.0 webencodings==0.5.1 diff --git a/pipenv/vendor/vendor_pip.txt b/pipenv/vendor/vendor_pip.txt index 9389dd947d..7b5482550b 100644 --- a/pipenv/vendor/vendor_pip.txt +++ b/pipenv/vendor/vendor_pip.txt @@ -1,23 +1,23 @@ appdirs==1.4.3 -distlib==0.2.7 -distro==1.3.0 -html5lib==1.0.1 -six==1.11.0 -colorama==0.3.9 CacheControl==0.12.5 -msgpack-python==0.5.6 -lockfile==0.12.2 -progress==1.4 +colorama==0.4.1 +distlib==0.2.8 +distro==1.4.0 +html5lib==1.0.1 ipaddress==1.0.22 # Only needed on 2.6 and 2.7 -packaging==18.0 -pep517==0.2 -pyparsing==2.2.1 -pytoml==0.1.19 -retrying==1.3.3 -requests==2.19.1 +lockfile==0.12.2 +msgpack==0.5.6 +packaging==19.0 +pep517==0.5.0 +progress==1.5 +pyparsing==2.4.0 +pytoml==0.1.20 +requests==2.21.0 + certifi==2019.3.9 chardet==3.0.4 - idna==2.7 - urllib3==1.23 - certifi==2018.8.24 -setuptools==40.4.3 + idna==2.8 + urllib3==1.25.2 +retrying==1.3.3 +setuptools==41.0.1 +six==1.12.0 webencodings==0.5.1 diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index dce9e5a75b..4b01ef0d21 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -666,6 +666,18 @@ def generate_patch(ctx, package_path, patch_description, base='HEAD'): ctx.run(command) +@invoke.task() +def update_pip_deps(ctx): + patched_dir = _get_patched_dir(ctx) + base_vendor_dir = _get_vendor_dir(ctx) + base_vendor_file = base_vendor_dir / "vendor_pip.txt" + pip_dir = patched_dir / "notpip" + vendor_dir = pip_dir / "_vendor" + vendor_file = vendor_dir / "vendor.txt" + vendor_file.write_bytes(base_vendor_file.read_bytes()) + download_licenses(ctx, vendor_dir) + + @invoke.task(name=TASK_NAME) def main(ctx, package=None): vendor_dir 
= _get_vendor_dir(ctx) From fd91d9627327a5a76c43163339bc6790818b369a Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 22:23:30 -0400 Subject: [PATCH 50/81] Fix namedtuple implementation Signed-off-by: Dan Ryan --- pipenv/exceptions.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index ad7c192ead..abc545821d 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -5,8 +5,7 @@ import sys from collections import namedtuple -from pprint import pformat -from traceback import format_exception, format_tb +from traceback import format_tb import six @@ -14,19 +13,25 @@ from ._compat import decode_for_output from .patched import crayons from .vendor.click.exceptions import ( - Abort, BadOptionUsage, BadParameter, ClickException, Exit, FileError, - MissingParameter, UsageError + ClickException, FileError, UsageError ) -from .vendor.click.types import Path from .vendor.vistir.misc import echo as click_echo import vistir ANSI_REMOVAL_RE = re.compile(r"\033\[((?:\d|;)*)([a-zA-Z])", re.MULTILINE) STRING_TYPES = (six.string_types, crayons.ColoredString) -KnownException = namedtuple( - 'KnownException', ['exception_name', 'match_string', 'show_from_string', 'prefix'], - defaults=[None, None, None, ""] -) + +if sys.version_info[:2] >= (3, 7): + KnownException = namedtuple( + 'KnownException', ['exception_name', 'match_string', 'show_from_string', 'prefix'], + defaults=[None, None, None, ""] + ) +else: + KnownException = namedtuple( + 'KnownException', ['exception_name', 'match_string', 'show_from_string', 'prefix'], + ) + KnownException.__new__.func_defaults = (None, None, None, "") + KNOWN_EXCEPTIONS = [ KnownException("PermissionError", prefix="Permission Denied:"), KnownException( @@ -41,7 +46,6 @@ def handle_exception(exc_type, exception, traceback, hook=sys.excepthook): if environments.is_verbose() or not issubclass(exc_type, ClickException): hook(exc_type, exception, traceback) else: - exc = format_exception(exc_type, exception, traceback) tb = format_tb(traceback, limit=-6) lines = itertools.chain.from_iterable([frame.splitlines() for frame in tb]) formatted_lines = [] From 27c529a0fee8ee6441d34774c884f66adf771c90 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 22:32:38 -0400 Subject: [PATCH 51/81] fix namedtuple syntax... 
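This and the previous commit converge on the standard cross-version spelling for namedtuple defaults: the ``defaults`` keyword only exists on Python 3.7+, ``func_defaults`` is the legacy Python 2 attribute name that Python 3 accepts as an ordinary (dead) attribute, and ``__defaults__`` is the spelling the interpreter actually consults on both 2.7 and 3.x. A condensed sketch, with illustrative names rather than pipenv's:

    import sys
    from collections import namedtuple

    if sys.version_info[:2] >= (3, 7):
        Record = namedtuple("Record", ["name", "prefix"], defaults=(None, ""))
    else:
        Record = namedtuple("Record", ["name", "prefix"])
        # The classic backport of namedtuple defaults. Assigning to
        # ``func_defaults`` under Python 3 silently sets a dead attribute,
        # and Record() then fails with TypeError at call time.
        Record.__new__.__defaults__ = (None, "")

    assert Record() == Record(name=None, prefix="")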
Signed-off-by: Dan Ryan --- pipenv/exceptions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index abc545821d..27b0586892 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -30,7 +30,7 @@ KnownException = namedtuple( 'KnownException', ['exception_name', 'match_string', 'show_from_string', 'prefix'], ) - KnownException.__new__.func_defaults = (None, None, None, "") + KnownException.__new__.__defaults__ = (None, None, None, "") KNOWN_EXCEPTIONS = [ KnownException("PermissionError", prefix="Permission Denied:"), From 65751d37c019264bfe752e1911252eb06b875bf0 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Wed, 15 May 2019 23:57:00 -0400 Subject: [PATCH 52/81] Update dependencies and fix azure pipeline definitions Signed-off-by: Dan Ryan --- .azure-pipelines/jobs/run-manifest-check.yml | 2 +- .azure-pipelines/jobs/run-tests-windows.yml | 4 +- .azure-pipelines/jobs/run-vendor-scripts.yml | 11 +-- .azure-pipelines/jobs/test.yml | 34 ++------- ...lenv.yml => create-virtualenv-windows.yml} | 0 .../steps/install-dependencies.yml | 2 +- .azure-pipelines/steps/run-tests-linux.yml | 8 +++ .azure-pipelines/steps/run-tests-windows.yml | 21 ++++++ .azure-pipelines/steps/run-tests.yml | 37 +++++----- .azure-pipelines/windows.yml | 2 +- Pipfile.lock | 45 ++++++------ azure-pipelines.yml | 69 +++++++++++++++++++ pipenv/exceptions.py | 2 +- setup.py | 4 +- .../vendoring/patches/patched/piptools.patch | 2 +- 15 files changed, 157 insertions(+), 86 deletions(-) rename .azure-pipelines/steps/{create-virtualenv.yml => create-virtualenv-windows.yml} (100%) create mode 100644 .azure-pipelines/steps/run-tests-linux.yml create mode 100644 .azure-pipelines/steps/run-tests-windows.yml create mode 100644 azure-pipelines.yml diff --git a/.azure-pipelines/jobs/run-manifest-check.yml b/.azure-pipelines/jobs/run-manifest-check.yml index 6aa634800c..4d12a8005b 100644 --- a/.azure-pipelines/jobs/run-manifest-check.yml +++ b/.azure-pipelines/jobs/run-manifest-check.yml @@ -11,5 +11,5 @@ steps: export GIT_SSL_CAINFO=$(python -m certifi) export LANG=C.UTF-8 python -m pip install --upgrade setuptools twine readme_renderer[md] - python setup.py sdist + python setup.py sdist bdist_wheel twine check dist/* diff --git a/.azure-pipelines/jobs/run-tests-windows.yml b/.azure-pipelines/jobs/run-tests-windows.yml index 700732c52b..05573b107e 100644 --- a/.azure-pipelines/jobs/run-tests-windows.yml +++ b/.azure-pipelines/jobs/run-tests-windows.yml @@ -13,6 +13,6 @@ steps: - template: ../steps/install-dependencies.yml -- template: ../steps/create-virtualenv.yml +- template: ../steps/create-virtualenv-windows.yml -- template: ../steps/run-tests.yml +- template: ../steps/run-tests-windows.yml diff --git a/.azure-pipelines/jobs/run-vendor-scripts.yml b/.azure-pipelines/jobs/run-vendor-scripts.yml index a419d941c0..947014b5d3 100644 --- a/.azure-pipelines/jobs/run-vendor-scripts.yml +++ b/.azure-pipelines/jobs/run-vendor-scripts.yml @@ -9,14 +9,9 @@ jobs: strategy: maxParallel: 4 matrix: - ${{ if eq(parameters.vmImage, 'vs2017-win2016') }}: - Python37: - python.version: '>= 3.7.2' - python.architecture: x64 - ${{ if ne(parameters.vmImage, 'vs2017-win2016' )}}: - Python37: - python.version: '>= 3.7' - python.architecture: x64 + Python37: + python.version: '>= 3.7' + python.architecture: x64 steps: - task: UsePythonVersion@0 displayName: Use Python $(python.version) diff --git a/.azure-pipelines/jobs/test.yml b/.azure-pipelines/jobs/test.yml index 4c8fc42035..b6c341f880 
100644 --- a/.azure-pipelines/jobs/test.yml +++ b/.azure-pipelines/jobs/test.yml @@ -12,37 +12,15 @@ jobs: Python27: python.version: '2.7' python.architecture: x64 - ${{ if eq(parameters.vmImage, 'vs2017-win2016') }}: - # TODO remove once vs2017-win2016 has Python 3.7 - Python37: - python.version: '>= 3.7.2' - python.architecture: x64 - ${{ if ne(parameters.vmImage, 'vs2017-win2016' )}}: - Python37: - python.version: '>= 3.7' - python.architecture: x64 - steps: - - ${{ if eq(parameters.vmImage, 'vs2017-win2016') }}: - - template: ./run-tests-windows.yml - - - ${{ if ne(parameters.vmImage, 'vs2017-win2016') }}: - - template: ./run-tests.yml - -- job: Test_Secondary - displayName: Test python3.6 - # Run after Test_Primary so we don't devour time and jobs if tests are going to fail - # dependsOn: Test_Primary - pool: - vmImage: ${{ parameters.vmImage }} - strategy: - maxParallel: 4 - matrix: + Python37: + python.version: '>= 3.7' + python.architecture: x64 Python36: - python.version: '3.6' + python.version: '>= 3.6' python.architecture: x64 steps: - - ${{ if eq(parameters.vmImage, 'vs2017-win2016') }}: + - ${{ if eq(parameters.vmImage, 'windows-2019') }}: - template: ./run-tests-windows.yml - - ${{ if ne(parameters.vmImage, 'vs2017-win2016') }}: + - ${{ if ne(parameters.vmImage, 'windows-2019') }}: - template: ./run-tests.yml diff --git a/.azure-pipelines/steps/create-virtualenv.yml b/.azure-pipelines/steps/create-virtualenv-windows.yml similarity index 100% rename from .azure-pipelines/steps/create-virtualenv.yml rename to .azure-pipelines/steps/create-virtualenv-windows.yml diff --git a/.azure-pipelines/steps/install-dependencies.yml b/.azure-pipelines/steps/install-dependencies.yml index fd0da8415d..1537640aac 100644 --- a/.azure-pipelines/steps/install-dependencies.yml +++ b/.azure-pipelines/steps/install-dependencies.yml @@ -1,3 +1,3 @@ steps: -- script: 'python -m pip install --upgrade pip setuptools wheel && python -m pip install -e .[test] --upgrade' +- script: 'python -m pip install --upgrade pip setuptools wheel && python -m pip install -e .[tests] --upgrade' displayName: Upgrade Pip & Install Pipenv diff --git a/.azure-pipelines/steps/run-tests-linux.yml b/.azure-pipelines/steps/run-tests-linux.yml new file mode 100644 index 0000000000..c49be8b1c6 --- /dev/null +++ b/.azure-pipelines/steps/run-tests-linux.yml @@ -0,0 +1,8 @@ +- script: | + # Fix Git SSL errors + export GIT_SSL_CAINFO="$(python -m certifi)" + export LANG="C.UTF-8" + export PIP_PROCESS_DEPENDENCY_LINKS="1" + git submodule sync && git submodule update --init --recursive + pipenv run pytest --junitxml=test-results.xml + displayName: Run integration tests diff --git a/.azure-pipelines/steps/run-tests-windows.yml b/.azure-pipelines/steps/run-tests-windows.yml new file mode 100644 index 0000000000..cc3aac2777 --- /dev/null +++ b/.azure-pipelines/steps/run-tests-windows.yml @@ -0,0 +1,21 @@ +steps: +- powershell: | + # Fix Git SSL errors + Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pip install certifi" + Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m certifi > cacert.txt" + Write-Host "##vso[task.setvariable variable=GIT_SSL_CAINFO]$(Get-Content cacert.txt)" + $env:GIT_SSL_CAINFO="$(Get-Content cacert.txt)" + # Shorten paths to get under MAX_PATH or else integration tests will fail + # https://bugs.python.org/issue18199 + subst T: "$env:TEMP" + Write-Host "##vso[task.setvariable variable=TEMP]T:\" + $env:TEMP='T:\' + Write-Host "##vso[task.setvariable variable=TMP]T:\" + $env:TMP='T:\' + git submodule sync + git 
submodule update --init --recursive + Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests" + displayName: Run integration tests + env: + VIRTUAL_ENV: $(VIRTUAL_ENV) + VIRTUAL_ENV_PY: $(VIRTUAL_ENV_PY) diff --git a/.azure-pipelines/steps/run-tests.yml b/.azure-pipelines/steps/run-tests.yml index 9c38d5e8df..cee9b84668 100644 --- a/.azure-pipelines/steps/run-tests.yml +++ b/.azure-pipelines/steps/run-tests.yml @@ -1,24 +1,23 @@ steps: -- powershell: | - # Fix Git SSL errors - Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pip install certifi" - Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m certifi > cacert.txt" - Write-Host "##vso[task.setvariable variable=GIT_SSL_CAINFO]$(Get-Content cacert.txt)" - $env:GIT_SSL_CAINFO="$(Get-Content cacert.txt)" - # Shorten paths to get under MAX_PATH or else integration tests will fail - # https://bugs.python.org/issue18199 - subst T: "$env:TEMP" - Write-Host "##vso[task.setvariable variable=TEMP]T:\" - $env:TEMP='T:\' - Write-Host "##vso[task.setvariable variable=TMP]T:\" - $env:TMP='T:\' - git submodule sync - git submodule update --init --recursive - Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests" - displayName: Run integration tests + - task: UsePythonVersion@0 + inputs: + versionSpec: $(python.version) + architecture: '$(python.architecture)' + addToPath: true + displayName: Use Python $(python.version) + + - template: install-dependencies.yml +steps: +- script: | + echo '##vso[task.setvariable variable=PIPENV_DEFAULT_PYTHON_VERSION]$(PYTHON_VERSION)' env: - VIRTUAL_ENV: $(VIRTUAL_ENV) - VIRTUAL_ENV_PY: $(VIRTUAL_ENV_PY) + PYTHON_VERSION: $(python.version) +- ${{ if eq(parameters.vmImage, 'windows-2019') }}: + - template: create-virtualenv-windows.yml + - template: run-tests-windows.yml +- ${{ if ne(parameters.vmImage, 'windows-2019') }}: + - template: create-virtualenv-linux.yml + - template: run-tests-linux.yml - task: PublishTestResults@2 displayName: Publish Test Results diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml index 0f04afbe99..c763bd34d8 100644 --- a/.azure-pipelines/windows.yml +++ b/.azure-pipelines/windows.yml @@ -21,4 +21,4 @@ trigger: jobs: - template: jobs/test.yml parameters: - vmImage: vs2017-win2016 + vmImage: windows-2019 diff --git a/Pipfile.lock b/Pipfile.lock index 53c174cfb3..416bfd7990 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -72,7 +72,7 @@ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a", "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd" ], - "markers": "python_version < '3'", + "markers": "python_version == '2.7'", "version": "==1.5" }, "backports.shutil-get-terminal-size": { @@ -80,7 +80,7 @@ "sha256:0975ba55054c15e346944b38956a4c9cbee9009391e41b86c68990effb8c1f64", "sha256:713e7a8228ae80341c70586d1cc0a8caa5207346927e23d09dcbcaf18eadec80" ], - "markers": "python_version < '3.3'", + "markers": "python_version == '2.7'", "version": "==1.0.0" }, "backports.weakref": { @@ -88,7 +88,7 @@ "sha256:81bc9b51c0abc58edc76aefbbc68c62a787918ffe943a37947e162c3f8e19e82", "sha256:bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2" ], - "markers": "python_version < '3.3'", + "markers": "python_version == '2.7'", "version": "==1.0.post1" }, "beautifulsoup4": { @@ -165,12 +165,11 @@ }, "configparser": { "hashes": [ - 
"sha256:5bd5fa2a491dc3cfe920a3f2a107510d65eceae10e9c6e547b90261a4710df32", - "sha256:c114ff90ee2e762db972fa205f02491b1f5cf3ff950decd8542c62970c9bedac", - "sha256:df28e045fbff307a28795b18df6ac8662be3219435560ddb068c283afab1ea7a" + "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", + "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" ], - "markers": "python_version < '3.2'", - "version": "==3.7.1" + "markers": "python_version == '2.7'", + "version": "==3.7.4" }, "distlib": { "hashes": [ @@ -247,7 +246,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "functools32": { @@ -255,7 +254,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version >= '2.7' and python_version < '2.8'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -263,7 +262,7 @@ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.2'", "version": "==3.2.0" }, "idna": { @@ -617,20 +616,20 @@ }, "scandir": { "hashes": [ - "sha256:04b8adb105f2ed313a7c2ef0f1cf7aff4871aa7a1883fa4d8c44b5551ab052d6", - "sha256:1444134990356c81d12f30e4b311379acfbbcd03e0bab591de2696a3b126d58e", - "sha256:1b5c314e39f596875e5a95dd81af03730b338c277c54a454226978d5ba95dbb6", - "sha256:346619f72eb0ddc4cf355ceffd225fa52506c92a2ff05318cfabd02a144e7c4e", - "sha256:44975e209c4827fc18a3486f257154d34ec6eaec0f90fef0cca1caa482db7064", - "sha256:61859fd7e40b8c71e609c202db5b0c1dbec0d5c7f1449dec2245575bdc866792", - "sha256:a5e232a0bf188362fa00123cc0bb842d363a292de7126126df5527b6a369586a", - "sha256:c14701409f311e7a9b7ec8e337f0815baf7ac95776cc78b419a1e6d49889a383", - "sha256:c7708f29d843fc2764310732e41f0ce27feadde453261859ec0fca7865dfc41b", - "sha256:c9009c527929f6e25604aec39b0a43c3f831d2947d89d6caaab22f057b7055c8", - "sha256:f5c71e29b4e2af7ccdc03a020c626ede51da471173b4a6ad1e904f2b2e04b4bd" + "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", + "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", + "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", + "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", + "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", + "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", + "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", + "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", + "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", + "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", + "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" ], "markers": "python_version < '3.5'", - "version": "==1.9.0" + "version": "==1.10.0" }, "six": { "hashes": [ diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000000..bf9d4a8650 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,69 @@ +name: Pipenv Build Rules +trigger: + batch: true + branches: + include: + - master + paths: + exclude: + 
- docs/* + - news/* + - peeps/* + - README.md + - pipenv/*.txt + - CHANGELOG.rst + - CONTRIBUTING.md + - CODE_OF_CONDUCT.md + - .gitignore + - .gitattributes + - .editorconfig + +jobs: +- job: TestLinux + pool: + vmImage: 'Ubuntu-16.04' + strategy: + matrix: + Python27: + python.version: '2.7' + Python36: + python.version: '3.6' + Python37: + python.version: '3.7' + maxParallel: 4 + steps: + - template: .azure-pipelines/steps/run-tests.yml + - template: .azure-pipelines/jobs/run-vendor-scripts.yml + +- job: TestWindows + pool: + vmImage: windows-2019 + strategy: + matrix: + Python27: + python.version: '2.7' + python.architecture: x64 + Python36: + python.version: '3.6' + python.architecture: x64 + Python37: + python.version: '3.7' + python.architecture: x64 + maxParallel: 4 + steps: + - template: .azure-pipelines/steps/run-tests.yml + +- job: TestMacOS + pool: + vmImage: macOS-10.13 + strategy: + matrix: + Python27: + python.version: '2.7' + Python36: + python.version: '3.6' + Python37: + python.version: '3.7' + maxParallel: 4 + steps: + - template: .azure-pipelines/steps/run-tests.yml diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 27b0586892..c8ca0dc8e4 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -30,7 +30,7 @@ KnownException = namedtuple( 'KnownException', ['exception_name', 'match_string', 'show_from_string', 'prefix'], ) - KnownException.__new__.__defaults__ = (None, None, None, "") + KnownException.__new__.__defaults__ = (None, None, None, "") KNOWN_EXCEPTIONS = [ KnownException("PermissionError", prefix="Permission Denied:"), diff --git a/setup.py b/setup.py index 235b5a109d..d0beff70f9 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,9 @@ "setuptools>=41.0.0", "virtualenv-clone>=0.2.5", "virtualenv", - 'enum34; python_version<"3"' + 'enum34; python_version<"3"', + # LEAVE THIS HERE!!! 
we have vendored dependencies that require it + 'typing; python_version<"3.5"' ] extras = { "dev": [ diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 84259ad369..419711e88f 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -400,7 +400,7 @@ index e54ae08..75b8208 100644 ireq.is_direct = True - reqset.add_requirement(ireq) + # reqset.add_requirement(ireq) - resolver = pip_shims.shims.Resolver(**resolver_kwargs) ++ resolver = pip_shims.shims.Resolver(**resolver_kwargs) resolver.require_hashes = False results = resolver._resolve_one(reqset, ireq) - reqset.cleanup_files() From cf6305ea7b9e77943d04920fbe4466f776d38776 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 16 May 2019 00:14:09 -0400 Subject: [PATCH 53/81] Fix piptools patch Signed-off-by: Dan Ryan --- tasks/vendoring/patches/patched/piptools.patch | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 419711e88f..14c3354ce1 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -161,8 +161,8 @@ index e54ae08..75b8208 100644 +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet, Specifier + -+os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") -+import pip_shims.shims ++os.environ["PIP_SHIMS_BASE_MODULE"] = str("pip") ++import pip_shims +from pip_shims.shims import VcsSupport, WheelCache, InstallationError + + @@ -399,6 +399,7 @@ index e54ae08..75b8208 100644 reqset = RequirementSet() ireq.is_direct = True - reqset.add_requirement(ireq) +- resolver = PipResolver(**resolver_kwargs) + # reqset.add_requirement(ireq) + resolver = pip_shims.shims.Resolver(**resolver_kwargs) resolver.require_hashes = False From adc8d1316824d7777f461517dae1743f7c5e2f51 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 16 May 2019 00:41:15 -0400 Subject: [PATCH 54/81] Add dotenv patch to guard typing imports Signed-off-by: Dan Ryan --- pipenv/vendor/dotenv/__init__.py | 5 +- pipenv/vendor/dotenv/cli.py | 5 +- pipenv/vendor/dotenv/compat.py | 13 +++ pipenv/vendor/dotenv/main.py | 22 ++-- .../vendor/dotenv-typing-imports.patch | 110 ++++++++++++++++++ 5 files changed, 145 insertions(+), 10 deletions(-) create mode 100644 tasks/vendoring/patches/vendor/dotenv-typing-imports.patch diff --git a/pipenv/vendor/dotenv/__init__.py b/pipenv/vendor/dotenv/__init__.py index 1867868f71..b88d9bc274 100644 --- a/pipenv/vendor/dotenv/__init__.py +++ b/pipenv/vendor/dotenv/__init__.py @@ -1,6 +1,9 @@ -from typing import Any, Optional +from .compat import IS_TYPE_CHECKING from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values +if IS_TYPE_CHECKING: + from typing import Any, Optional + def load_ipython_extension(ipython): # type: (Any) -> None diff --git a/pipenv/vendor/dotenv/cli.py b/pipenv/vendor/dotenv/cli.py index 45f4b765e7..829b14adc3 100644 --- a/pipenv/vendor/dotenv/cli.py +++ b/pipenv/vendor/dotenv/cli.py @@ -1,6 +1,5 @@ import os import sys -from typing import Any, List try: import click @@ -9,9 +8,13 @@ 'Run pip install "python-dotenv[cli]" to fix this.') sys.exit(1) +from .compat import IS_TYPE_CHECKING from .main import dotenv_values, get_key, set_key, unset_key, run_command from .version import __version__ +if IS_TYPE_CHECKING: + from typing import Any, List + 
@click.group() @click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'), diff --git a/pipenv/vendor/dotenv/compat.py b/pipenv/vendor/dotenv/compat.py index 99ffb39b36..7a8694fc6e 100644 --- a/pipenv/vendor/dotenv/compat.py +++ b/pipenv/vendor/dotenv/compat.py @@ -1,3 +1,4 @@ +import os import sys if sys.version_info >= (3, 0): @@ -6,3 +7,15 @@ from StringIO import StringIO # noqa PY2 = sys.version_info[0] == 2 # type: bool + + +def is_type_checking(): + # type: () -> bool + try: + from typing import TYPE_CHECKING + except ImportError: + return False + return TYPE_CHECKING + + +IS_TYPE_CHECKING = os.environ.get("MYPY_RUNNING", is_type_checking()) diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py index 08122825eb..64d4269630 100644 --- a/pipenv/vendor/dotenv/main.py +++ b/pipenv/vendor/dotenv/main.py @@ -9,15 +9,17 @@ import sys from subprocess import Popen import tempfile -from typing import (Any, Dict, Iterator, List, Match, NamedTuple, Optional, # noqa - Pattern, Union, TYPE_CHECKING, Text, IO, Tuple) # noqa import warnings from collections import OrderedDict from contextlib import contextmanager -from .compat import StringIO, PY2 +from .compat import StringIO, PY2, IS_TYPE_CHECKING -if TYPE_CHECKING: # pragma: no cover +if IS_TYPE_CHECKING: # pragma: no cover + from typing import ( + Dict, Iterator, List, Match, Optional, Pattern, Union, + Text, IO, Tuple + ) if sys.version_info >= (3, 6): _PathLike = os.PathLike else: @@ -59,10 +61,14 @@ _escape_sequence = re.compile(r"\\[\\'\"abfnrtv]") # type: Pattern[Text] - -Binding = NamedTuple("Binding", [("key", Optional[Text]), - ("value", Optional[Text]), - ("original", Text)]) +try: + from typing import NamedTuple, Optional, Text + Binding = NamedTuple("Binding", [("key", Optional[Text]), + ("value", Optional[Text]), + ("original", Text)]) +except ImportError: + from collections import namedtuple + Binding = namedtuple("Binding", ["key", "value", "original"]) def decode_escapes(string): diff --git a/tasks/vendoring/patches/vendor/dotenv-typing-imports.patch b/tasks/vendoring/patches/vendor/dotenv-typing-imports.patch new file mode 100644 index 0000000000..386cecd131 --- /dev/null +++ b/tasks/vendoring/patches/vendor/dotenv-typing-imports.patch @@ -0,0 +1,110 @@ +diff --git a/pipenv/vendor/dotenv/__init__.py b/pipenv/vendor/dotenv/__init__.py +index 1867868..b88d9bc 100644 +--- a/pipenv/vendor/dotenv/__init__.py ++++ b/pipenv/vendor/dotenv/__init__.py +@@ -1,6 +1,9 @@ +-from typing import Any, Optional ++from .compat import IS_TYPE_CHECKING + from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values + ++if IS_TYPE_CHECKING: ++ from typing import Any, Optional ++ + + def load_ipython_extension(ipython): + # type: (Any) -> None +diff --git a/pipenv/vendor/dotenv/cli.py b/pipenv/vendor/dotenv/cli.py +index 45f4b76..829b14a 100644 +--- a/pipenv/vendor/dotenv/cli.py ++++ b/pipenv/vendor/dotenv/cli.py +@@ -1,6 +1,5 @@ + import os + import sys +-from typing import Any, List + + try: + import click +@@ -9,9 +8,13 @@ except ImportError: + 'Run pip install "python-dotenv[cli]" to fix this.') + sys.exit(1) + ++from .compat import IS_TYPE_CHECKING + from .main import dotenv_values, get_key, set_key, unset_key, run_command + from .version import __version__ + ++if IS_TYPE_CHECKING: ++ from typing import Any, List ++ + + @click.group() + @click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'), +diff --git a/pipenv/vendor/dotenv/compat.py b/pipenv/vendor/dotenv/compat.py +index 
99ffb39..7a8694f 100644 +--- a/pipenv/vendor/dotenv/compat.py ++++ b/pipenv/vendor/dotenv/compat.py +@@ -1,3 +1,4 @@ ++import os + import sys + + if sys.version_info >= (3, 0): +@@ -6,3 +7,15 @@ else: + from StringIO import StringIO # noqa + + PY2 = sys.version_info[0] == 2 # type: bool ++ ++ ++def is_type_checking(): ++ # type: () -> bool ++ try: ++ from typing import TYPE_CHECKING ++ except ImportError: ++ return False ++ return TYPE_CHECKING ++ ++ ++IS_TYPE_CHECKING = os.environ.get("MYPY_RUNNING", is_type_checking()) +diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py +index 0812282..64d4269 100644 +--- a/pipenv/vendor/dotenv/main.py ++++ b/pipenv/vendor/dotenv/main.py +@@ -9,15 +9,17 @@ import shutil + import sys + from subprocess import Popen + import tempfile +-from typing import (Any, Dict, Iterator, List, Match, NamedTuple, Optional, # noqa +- Pattern, Union, TYPE_CHECKING, Text, IO, Tuple) # noqa + import warnings + from collections import OrderedDict + from contextlib import contextmanager + +-from .compat import StringIO, PY2 ++from .compat import StringIO, PY2, IS_TYPE_CHECKING + +-if TYPE_CHECKING: # pragma: no cover ++if IS_TYPE_CHECKING: # pragma: no cover ++ from typing import ( ++ Dict, Iterator, List, Match, Optional, Pattern, Union, ++ Text, IO, Tuple ++ ) + if sys.version_info >= (3, 6): + _PathLike = os.PathLike + else: +@@ -59,10 +61,14 @@ _binding = re.compile( + + _escape_sequence = re.compile(r"\\[\\'\"abfnrtv]") # type: Pattern[Text] + +- +-Binding = NamedTuple("Binding", [("key", Optional[Text]), +- ("value", Optional[Text]), +- ("original", Text)]) ++try: ++ from typing import NamedTuple, Optional, Text ++ Binding = NamedTuple("Binding", [("key", Optional[Text]), ++ ("value", Optional[Text]), ++ ("original", Text)]) ++except ImportError: ++ from collections import namedtuple ++ Binding = namedtuple("Binding", ["key", "value", "original"]) + + + def decode_escapes(string): From d8e66c7fceb224dc456b5df70ea15afe918e1127 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 16 May 2019 00:52:10 -0400 Subject: [PATCH 55/81] Add future imports to tests Signed-off-by: Dan Ryan --- tests/integration/conftest.py | 2 +- tests/integration/test_cli.py | 4 +++- tests/integration/test_dot_venv.py | 2 ++ tests/integration/test_install_basic.py | 2 ++ tests/integration/test_install_markers.py | 2 ++ tests/integration/test_install_twists.py | 2 ++ tests/integration/test_install_uri.py | 1 + tests/integration/test_pipenv.py | 2 ++ tests/integration/test_project.py | 1 + tests/integration/test_run.py | 2 ++ tests/integration/test_sync.py | 2 ++ tests/integration/test_uninstall.py | 2 ++ tests/integration/test_windows.py | 2 ++ 13 files changed, 24 insertions(+), 2 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 1fff03ab60..8848c05233 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -from __future__ import print_function +from __future__ import absolute_import, print_function import errno import json import os diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index a38883e0ed..82b17e17ed 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function """Tests to ensure `pipenv --option` works. 
""" @@ -47,7 +49,7 @@ def test_pipenv_site_packages(PipenvInstance): c = p.pipenv('--python python --site-packages') assert c.return_code == 0 assert 'Making site-packages available' in c.err - + # no-global-site-packages.txt under stdlib dir should not exist. c = p.pipenv('run python -c "import sysconfig; print(sysconfig.get_path(\'stdlib\'))"') assert c.return_code == 0 diff --git a/tests/integration/test_dot_venv.py b/tests/integration/test_dot_venv.py index db61d32198..07904b1637 100644 --- a/tests/integration/test_dot_venv.py +++ b/tests/integration/test_dot_venv.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import pytest diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index 1af9b6af30..8b9df96cb7 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import pytest diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 1d3fb1bd13..9d62e98054 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import sys diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py index 08b2ef1e31..ddf5411743 100644 --- a/tests/integration/test_install_twists.py +++ b/tests/integration/test_install_twists.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import shutil import sys diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 8e79670cf7..3edc1e1154 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -1,4 +1,5 @@ # -*- coding=utf-8 -*- +from __future__ import absolute_import, print_function import pytest from flaky import flaky diff --git a/tests/integration/test_pipenv.py b/tests/integration/test_pipenv.py index ef8c23f28a..23ebfc4c5f 100644 --- a/tests/integration/test_pipenv.py +++ b/tests/integration/test_pipenv.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function """Misc. tests that don't fit anywhere. XXX: Try our best to reduce tests in this file. 
diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py index d193fc7dcb..f9c02ee993 100644 --- a/tests/integration/test_project.py +++ b/tests/integration/test_project.py @@ -1,4 +1,5 @@ # -*- coding=utf-8 -*- +from __future__ import absolute_import, print_function import io import os import tarfile diff --git a/tests/integration/test_run.py b/tests/integration/test_run.py index 8cd3911528..28f97e4846 100644 --- a/tests/integration/test_run.py +++ b/tests/integration/test_run.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import pytest diff --git a/tests/integration/test_sync.py b/tests/integration/test_sync.py index c9e5c0577a..050e790ad5 100644 --- a/tests/integration/test_sync.py +++ b/tests/integration/test_sync.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import pytest diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py index e366861b0c..d850ed0cbf 100644 --- a/tests/integration/test_uninstall.py +++ b/tests/integration/test_uninstall.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import shutil diff --git a/tests/integration/test_windows.py b/tests/integration/test_windows.py index 9c2b707488..80fc4053c9 100644 --- a/tests/integration/test_windows.py +++ b/tests/integration/test_windows.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, print_function import os import pytest From 7e394c39e788c6776ceac7913432c66a411e2174 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 16 May 2019 01:24:47 -0400 Subject: [PATCH 56/81] Fix license download script Signed-off-by: Dan Ryan --- .../vendor/pythonfinder/pep514tools.LICENSE | 21 +++++++++++++++++++ tasks/vendoring/__init__.py | 11 +++++++++- 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 pipenv/vendor/pythonfinder/pep514tools.LICENSE diff --git a/pipenv/vendor/pythonfinder/pep514tools.LICENSE b/pipenv/vendor/pythonfinder/pep514tools.LICENSE new file mode 100644 index 0000000000..c7ac395fb9 --- /dev/null +++ b/pipenv/vendor/pythonfinder/pep514tools.LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Steve Dower + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
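Review note: the vendoring hunks below add a LICENSE_RENAMES table so that the license fetched for pythonfinder's bundled pep514tools lands at pythonfinder/pep514tools.LICENSE instead of clobbering pythonfinder's own license file. A minimal, self-contained sketch of that lookup follows; the dictionary contents are illustrative stand-ins, and the Path(...) wrapping anticipates the fix PATCH 58 applies later in this series (the first version calls `LIBRARY_DIRNAMES[libname] / filename` on a plain string):

```python
from pathlib import Path

# Illustrative stand-ins for the real tables in tasks/vendoring/__init__.py;
# only the pythonfinder entry matters for this rename.
LIBRARY_DIRNAMES = {"pythonfinder": "pythonfinder"}
LICENSE_RENAMES = {"pythonfinder/LICENSE": "pythonfinder/pep514tools.LICENSE"}


def license_destination(vendor_dir, libname, filename):
    # Compute the default target, then reroute it through LICENSE_RENAMES.
    license_path = Path(LIBRARY_DIRNAMES[libname]) / filename
    if license_path.as_posix() in LICENSE_RENAMES:
        return vendor_dir / LICENSE_RENAMES[license_path.as_posix()]
    return vendor_dir / LIBRARY_DIRNAMES[libname] / filename


print(license_destination(Path("pipenv/vendor"), "pythonfinder", "LICENSE"))
# -> pipenv/vendor/pythonfinder/pep514tools.LICENSE
```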
diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 4b01ef0d21..aa0a15df79 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -90,6 +90,12 @@ } +LICENSE_RENAMES = { + "pythonfinder/LICENSE": "pythonfinder/pep514tools.LICENSE" +} + + + def drop_dir(path): if path.exists() and path.is_dir(): shutil.rmtree(str(path), ignore_errors=True) @@ -522,7 +528,7 @@ def download_licenses( if req.startswith("enum34"): exe_cmd = "{0} -d {1} {2}".format(enum_cmd, tmp_dir.as_posix(), req) else: - exe_cmd = "{0} --no-build-isolation --no-use-pep517 -d {1} {2}".format( + exe_cmd = "{0} --no-build-isolation -d {1} {2}".format( cmd, tmp_dir.as_posix(), req ) try: @@ -629,6 +635,9 @@ def license_destination(vendor_dir, libname, filename): return ( vendor_dir / override.parent ) / '{0}.{1}'.format(override.name, filename) + license_path = LIBRARY_DIRNAMES[libname] / filename + if license_path.as_posix() in LICENSE_RENAMES: + return vendor_dir / LICENSE_RENAMES[license_path.as_posix()] return vendor_dir / LIBRARY_DIRNAMES[libname] / filename # fallback to libname.LICENSE (used for nondirs) return vendor_dir / '{}.{}'.format(libname, filename) From cd4d8f22314ab64294f47ed6268dcf7107987e60 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 16 May 2019 02:02:44 -0400 Subject: [PATCH 57/81] Minor requirementslib update Signed-off-by: Dan Ryan --- pipenv/vendor/requirementslib/models/requirements.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index cb8710db71..b8534c697e 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -939,7 +939,7 @@ def _parse_name_from_path(self): metadata = get_metadata(self.path) if metadata: name = metadata.get("name", "") - if name: + if name and name != "wheel": return name parsed_setup_cfg = self.parsed_setup_cfg if parsed_setup_cfg: @@ -2552,12 +2552,12 @@ def get_specifiers(self): return "" def update_name_from_path(self, path): - from .setup_info import get_metadata - metadata = get_metadata(path) name = self.name if metadata is not None: - name = metadata.get("name") + metadata_name = metadata.get("name") + if metadata_name and metadata_name != "wheel": + name = metadata_name if name is not None: if self.req.name is None: self.req.name = name From c23e57b4824e0553ea2e91a2a6c59bff811aac39 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Thu, 16 May 2019 02:16:52 -0400 Subject: [PATCH 58/81] Fix test and pipeline definitions - Update extras name in pipelines - fix windows virtualenv calls - don't write debug info during virtualenv creation and just move on - Generalize some pipelines - Use quotes around evaluated statements in pipeline - Fix envvar references - Update vistir: - fix stream wrapper on windows - fix ICACLS permissions handler - fix path normalization function - Update azure venv creation script - Fix vendoring script Signed-off-by: Dan Ryan --- .../steps/create-virtualenv-linux.yml | 4 +- .../steps/create-virtualenv-windows.yml | 85 +++++++++++++------ .azure-pipelines/steps/create-virtualenv.yml | 37 ++++++++ .../steps/install-dependencies.yml | 2 + .azure-pipelines/steps/run-tests-linux.yml | 12 ++- .azure-pipelines/steps/run-tests-windows.yml | 46 +++++++--- .azure-pipelines/steps/run-tests.yml | 21 ++--- Pipfile | 1 - pipenv/__init__.py | 18 ++-- pipenv/core.py | 4 +- pipenv/environments.py | 9 +- 
pipenv/vendor/requests/__init__.py | 4 +- pipenv/vendor/requests/__version__.py | 6 +- pipenv/vendor/requests/api.py | 4 +- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/vendor.txt | 2 +- pipenv/vendor/vistir/__init__.py | 2 +- pipenv/vendor/vistir/_winconsole.py | 41 ++++++--- pipenv/vendor/vistir/misc.py | 23 +++-- pipenv/vendor/vistir/path.py | 20 +++-- tasks/vendoring/__init__.py | 2 +- tests/integration/conftest.py | 22 +++-- tests/integration/test_project.py | 3 +- 23 files changed, 261 insertions(+), 109 deletions(-) create mode 100644 .azure-pipelines/steps/create-virtualenv.yml diff --git a/.azure-pipelines/steps/create-virtualenv-linux.yml b/.azure-pipelines/steps/create-virtualenv-linux.yml index e53893763e..519c5c5e6b 100644 --- a/.azure-pipelines/steps/create-virtualenv-linux.yml +++ b/.azure-pipelines/steps/create-virtualenv-linux.yml @@ -9,6 +9,8 @@ steps: echo "Path $PATH" echo "Installing Pipenv…" pipenv install --deploy --dev - pipenv run pip install -e "$(pwd)[test]" --upgrade + pipenv run pip install -e "$(pwd)[tests]" --upgrade echo pipenv --venv && echo pipenv --py && echo pipenv run python --version displayName: Make Virtualenv + env: + PYTHONWARNINGS: 'ignore:DEPRECATION' diff --git a/.azure-pipelines/steps/create-virtualenv-windows.yml b/.azure-pipelines/steps/create-virtualenv-windows.yml index 4dbdb685ae..3533820556 100644 --- a/.azure-pipelines/steps/create-virtualenv-windows.yml +++ b/.azure-pipelines/steps/create-virtualenv-windows.yml @@ -1,31 +1,62 @@ steps: +- script: | + echo "##vso[task.setvariable variable=LANG]C.UTF-8" + echo "##vso[task.setvariable variable=PIP_PROCESS_DEPENDENCY_LINKS]1" + displayName: Set Environment Variables + - powershell: | - $env:PY_EXE=$(python -c "import sys; print(sys.executable)") - if (!$env:PY_EXE) { - $env:PY_EXE="python" - } - Write-Host "##vso[task.setvariable variable=PY_EXE]$env:PY_EXE" - Write-Host "Found Python: $env:PY_EXE" - Invoke-Expression "& '$env:PY_EXE' -m virtualenv D:\.venv" - Write-Host "##vso[task.setvariable variable=VIRTUAL_ENV]D:\.venv" - Invoke-Expression "& 'D:\.venv\Scripts\activate.ps1'" - $env:VIRTUAL_ENV="D:\.venv" - Write-Host "Installing local package..." 
- Invoke-Expression "& '$env:PY_EXE' -m pip install -e .[test] --upgrade" - Write-Host "upgrading local package in virtual env" - $venv_scripts = Join-Path -path D:\.venv -childpath Scripts - $venv_py = Join-Path -path $venv_scripts -childpath python.exe - Write-Host "##vso[task.setvariable variable=VIRTUAL_ENV_PY]$venv_py" - Invoke-Expression "& '$venv_py' -m pip install -e .[test] --upgrade" - Write-Host "Installing pipenv development packages" - Invoke-Expression "& '$venv_py' -m pipenv install --dev" - Write-Host "Installing local package in pipenv environment" - Invoke-Expression "& '$venv_py' -m pipenv run pip install -e .[test]" - Write-Host "Printing metadata" - Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv --venv") - Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv --py") - Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv run python --version") - displayName: Make Virtualenv + pip install certifi + $env:PYTHON_PATH=$(python -c "import sys; print(sys.executable)") + $env:CERTIFI_CONTENT=$(python -m certifi) + echo "##vso[task.setvariable variable=GIT_SSL_CAINFO]$env:CERTIFI_CONTENT" + echo "##vso[task.setvariable variable=PY_EXE]$env:PYTHON_PATH" + displayName: Set Python Path + env: + PYTHONWARNINGS: 'ignore:DEPRECATION' + +- script: | + echo "Python path: $(PY_EXE)" + echo "GIT_SSL_CAINFO: $(GIT_SSL_CAINFO)" + $(PY_EXE) -m pipenv install --deploy --dev env: - PIPENV_DEFAULT_PYTHON_VERSION: $(PIPENV_DEFAULT_PYTHON_VERSION) + PIPENV_DEFAULT_PYTHON_VERSION: '$(PIPENV_DEFAULT_PYTHON_VERSION)' + PYTHONWARNINGS: 'ignore:DEPRECATION' + PIPENV_NOSPIN: '1' + displayName: Make Virtualenv + +# steps: + +# - powershell: | +# $env:PY_EXE=$(python -c "import sys; print(sys.executable)") +# if (!$env:PY_EXE) { +# $env:PY_EXE="python" +# } +# Write-Host "##vso[task.setvariable variable=PY_EXE]$env:PY_EXE" +# Write-Host "Found Python: $env:PY_EXE" +# Invoke-Expression "& '$env:PY_EXE' -m virtualenv D:\.venv" +# Write-Host "##vso[task.setvariable variable=VIRTUAL_ENV]D:\.venv" +# Invoke-Expression "& 'D:\.venv\Scripts\activate.ps1'" +# $env:VIRTUAL_ENV="D:\.venv" +# Write-Host "Installing local package..." 
+# Invoke-Expression "& '$env:PY_EXE' -m pip install -e .[tests] requests --upgrade" +# Write-Host "upgrading local package in virtual env" +# $venv_scripts = Join-Path -path D:\.venv -childpath Scripts +# $venv_py = Join-Path -path $venv_scripts -childpath python.exe +# Write-Host "##vso[task.setvariable variable=VIRTUAL_ENV_PY]$venv_py" +# Invoke-Expression "& '$venv_py' -m pip install -e .[tests] requests --upgrade" 2>&1 +# Write-Host "Installing pipenv development packages" +# Invoke-Expression "& '$venv_py' -m pipenv install --dev" 2>&1 +# # Write-Host "Installing local package in pipenv environment" +# # Invoke-Expression "& '$venv_py' -m pipenv run pip install -e .[tests] requests" 2>&1 +# # Write-Host "Printing metadata" +# # Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv --venv" 2>&1) +# # Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv --py" 2>&1) +# # Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv run python --version" 2>&1) +# displayName: Make Virtualenv +# failOnStderr: false +# env: +# PIPENV_DEFAULT_PYTHON_VERSION: $(PIPENV_DEFAULT_PYTHON_VERSION) +# PYTHONWARNINGS: 'ignore:DEPRECATION' +# PIPENV_VERBOSITY: '-1' +# PIPENV_NOSPIN: '1' diff --git a/.azure-pipelines/steps/create-virtualenv.yml b/.azure-pipelines/steps/create-virtualenv.yml new file mode 100644 index 0000000000..48bb2233ec --- /dev/null +++ b/.azure-pipelines/steps/create-virtualenv.yml @@ -0,0 +1,37 @@ +steps: + +- script: | + echo "##vso[task.setvariable variable=LANG]C.UTF-8" + echo "##vso[task.setvariable variable=PIP_PROCESS_DEPENDENCY_LINKS]1" + displayName: Set Environment Variables + +- ${{ if eq(parameters.vmImage, 'windows-2019') }}: + - powershell: | + pip install certifi + $env:PYTHON_PATH=$(python -c "import sys; print(sys.executable)") + $env:CERTIFI_CONTENT=$(python -m certifi) + echo "##vso[task.setvariable variable=GIT_SSL_CAINFO]$env:CERTIFI_CONTENT" + echo "##vso[task.setvariable variable=PY_EXE]$env:PYTHON_PATH" + displayName: Set Python Path + env: + PYTHONWARNINGS: 'ignore:DEPRECATION' +- ${{ if ne(parameters.vmImage, 'windows-2019') }}: + - bash: | + pip install certifi + PYTHON_PATH=$(python -c 'import sys; print(sys.executable)') + CERTIFI_CONTENT=$(python -m certifi) + echo "##vso[task.setvariable variable=GIT_SSL_CAINFO]$CERTIFI_CONTENT" + echo "##vso[task.setvariable variable=PY_EXE]$PYTHON_PATH" + displayName: Set Python Path + env: + PYTHONWARNINGS: 'ignore:DEPRECATION' + +- script: | + echo "Python path: $(PY_EXE)" + echo "GIT_SSL_CAINFO: $(GIT_SSL_CAINFO)" + $(PY_EXE) -m pipenv install --deploy --dev + env: + PIPENV_DEFAULT_PYTHON_VERSION: '$(PIPENV_DEFAULT_PYTHON_VERSION)' + PYTHONWARNINGS: 'ignore:DEPRECATION' + PIPENV_NOSPIN: '1' + displayName: Make Virtualenv diff --git a/.azure-pipelines/steps/install-dependencies.yml b/.azure-pipelines/steps/install-dependencies.yml index 1537640aac..16d2bd2842 100644 --- a/.azure-pipelines/steps/install-dependencies.yml +++ b/.azure-pipelines/steps/install-dependencies.yml @@ -1,3 +1,5 @@ steps: - script: 'python -m pip install --upgrade pip setuptools wheel && python -m pip install -e .[tests] --upgrade' displayName: Upgrade Pip & Install Pipenv + env: + PYTHONWARNINGS: 'ignore:DEPRECATION' diff --git a/.azure-pipelines/steps/run-tests-linux.yml b/.azure-pipelines/steps/run-tests-linux.yml index c49be8b1c6..02ea8cf8b9 100644 --- a/.azure-pipelines/steps/run-tests-linux.yml +++ b/.azure-pipelines/steps/run-tests-linux.yml @@ -1,8 +1,14 @@ - script: | # Fix Git SSL errors - export GIT_SSL_CAINFO="$(python -m certifi)" 
- export LANG="C.UTF-8" - export PIP_PROCESS_DEPENDENCY_LINKS="1" git submodule sync && git submodule update --init --recursive pipenv run pytest --junitxml=test-results.xml displayName: Run integration tests + env: + PYTHONWARNINGS: 'ignore:DEPRECATION' + PY_EXE: $(PY_EXE) + GIT_SSL_CAINFO: $(GIT_SSL_CAINFO) + LANG: $(LANG) + PIP_PROCESS_DEPENDENCY_LINKS: $(PIP_PROCESS_DEPENDENCY_LINKS) + PIPENV_DEFAULT_PYTHON_VERSION: $(PIPENV_DEFAULT_PYTHON_VERSION) + PYTHONWARNINGS: ignore:DEPRECATION + PIPENV_NOSPIN: '1' diff --git a/.azure-pipelines/steps/run-tests-windows.yml b/.azure-pipelines/steps/run-tests-windows.yml index cc3aac2777..7778f7c5a5 100644 --- a/.azure-pipelines/steps/run-tests-windows.yml +++ b/.azure-pipelines/steps/run-tests-windows.yml @@ -1,21 +1,39 @@ steps: - powershell: | - # Fix Git SSL errors - Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pip install certifi" - Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m certifi > cacert.txt" - Write-Host "##vso[task.setvariable variable=GIT_SSL_CAINFO]$(Get-Content cacert.txt)" - $env:GIT_SSL_CAINFO="$(Get-Content cacert.txt)" - # Shorten paths to get under MAX_PATH or else integration tests will fail - # https://bugs.python.org/issue18199 subst T: "$env:TEMP" Write-Host "##vso[task.setvariable variable=TEMP]T:\" - $env:TEMP='T:\' Write-Host "##vso[task.setvariable variable=TMP]T:\" - $env:TMP='T:\' - git submodule sync - git submodule update --init --recursive - Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests" + displayName: Fix Temp Variable + +- script: | + git submodule sync && git submodule update --init --recursive + pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests displayName: Run integration tests env: - VIRTUAL_ENV: $(VIRTUAL_ENV) - VIRTUAL_ENV_PY: $(VIRTUAL_ENV_PY) + PIPENV_DEFAULT_PYTHON_VERSION: $(PIPENV_DEFAULT_PYTHON_VERSION) + PYTHONWARNINGS: 'ignore:DEPRECATION' + PIPENV_NOSPIN: '1' + +# - powershell: | +# # Fix Git SSL errors +# Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pip install certifi" +# Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m certifi > cacert.txt" +# Write-Host "##vso[task.setvariable variable=GIT_SSL_CAINFO]$(Get-Content cacert.txt)" +# $env:GIT_SSL_CAINFO="$(Get-Content cacert.txt)" +# # Shorten paths to get under MAX_PATH or else integration tests will fail +# # https://bugs.python.org/issue18199 +# subst T: "$env:TEMP" +# Write-Host "##vso[task.setvariable variable=TEMP]T:\" +# $env:TEMP='T:\' +# Write-Host "##vso[task.setvariable variable=TMP]T:\" +# $env:TMP='T:\' +# Invoke-Expression "git submodule sync -q" 2>&1 +# Invoke-Expression "git submodule update --init --recursive -q" 2>&1 +# Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests" +# displayName: Run integration tests +# failOnStderr: false +# env: +# VIRTUAL_ENV: $(VIRTUAL_ENV) +# VIRTUAL_ENV_PY: $(VIRTUAL_ENV_PY) +# PYTHONWARNINGS: 'ignore:DEPRECATION' +# PIPENV_VERBOSITY: '-1' diff --git a/.azure-pipelines/steps/run-tests.yml b/.azure-pipelines/steps/run-tests.yml index cee9b84668..f559b8caec 100644 --- a/.azure-pipelines/steps/run-tests.yml +++ b/.azure-pipelines/steps/run-tests.yml @@ -1,22 +1,23 @@ steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(python.version) - architecture: '$(python.architecture)' - addToPath: true - displayName: Use Python $(python.version) +- task: 
UsePythonVersion@0 + inputs: + versionSpec: $(python.version) + architecture: '$(python.architecture)' + addToPath: true + displayName: Use Python $(python.version) + +- template: install-dependencies.yml - - template: install-dependencies.yml -steps: - script: | echo '##vso[task.setvariable variable=PIPENV_DEFAULT_PYTHON_VERSION]$(PYTHON_VERSION)' env: PYTHON_VERSION: $(python.version) + +- template: create-virtualenv.yml + - ${{ if eq(parameters.vmImage, 'windows-2019') }}: - - template: create-virtualenv-windows.yml - template: run-tests-windows.yml - ${{ if ne(parameters.vmImage, 'windows-2019') }}: - - template: create-virtualenv-linux.yml - template: run-tests-linux.yml - task: PublishTestResults@2 diff --git a/Pipfile b/Pipfile index a5c1fee0f3..826df207d7 100644 --- a/Pipfile +++ b/Pipfile @@ -16,4 +16,3 @@ tests = "bash ./run-tests.sh" [pipenv] allow_prereleases = true - diff --git a/pipenv/__init__.py b/pipenv/__init__.py index a83f94e8b1..695a493912 100644 --- a/pipenv/__init__.py +++ b/pipenv/__init__.py @@ -36,11 +36,19 @@ except Exception: pass -from .vendor.vistir.misc import replace_with_text_stream -from .vendor import colorama -replace_with_text_stream("stdout") -replace_with_text_stream("stderr") -# colorama.init(wrap=False) +from pipenv.vendor.vistir.misc import get_text_stream +stdout = get_text_stream("stdout") +stderr = get_text_stream("stderr") + +if os.name == "nt": + from pipenv.vendor.vistir.misc import _can_use_color, _wrap_for_color + if _can_use_color(stdout): + stdout = _wrap_for_color(stdout) + if _can_use_color(stderr): + stderr = _wrap_for_color(stderr) + +sys.stdout = stdout +sys.stderr = stderr from .cli import cli from . import resolver diff --git a/pipenv/core.py b/pipenv/core.py index dd73c71191..65541857f2 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -917,12 +917,12 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None): pip_config = {} # Actually create the virtualenv. - with create_spinner("Creating virtual environment...") as sp: + with create_spinner(u"Creating virtual environment...") as sp: c = vistir.misc.run( cmd, verbose=False, return_object=True, write_to_stdout=False, combine_stderr=False, block=True, nospin=True, env=pip_config, ) - click.echo(crayons.blue("{0}".format(c.out)), err=True) + click.echo(crayons.blue(u"{0}".format(c.out)), err=True) if c.returncode != 0: sp.fail(environments.PIPENV_SPINNER_FAIL_TEXT.format(u"Failed creating virtual environment")) error = c.err if environments.is_verbose() else exceptions.prettify_exc(c.err) diff --git a/pipenv/environments.py b/pipenv/environments.py index 1408c99c77..34aef2bce7 100644 --- a/pipenv/environments.py +++ b/pipenv/environments.py @@ -3,10 +3,12 @@ import os import sys +from io import UnsupportedOperation + from appdirs import user_cache_dir from ._compat import fix_utf8 -from .vendor.vistir.misc import fs_str +from .vendor.vistir.misc import _isatty, fs_str # HACK: avoid resolver.py uses the wrong byte code files. @@ -263,7 +265,10 @@ def _is_env_truthy(name): ) # Internal, to tell whether the command line session is interactive. -SESSION_IS_INTERACTIVE = bool(os.isatty(sys.stdout.fileno())) +try: + SESSION_IS_INTERACTIVE = _isatty(sys.stdout.fileno()) +except UnsupportedOperation: + SESSION_IS_INTERACTIVE = _isatty(sys.stdout) # Internal, consolidated verbosity representation as an integer. 
The default diff --git a/pipenv/vendor/requests/__init__.py b/pipenv/vendor/requests/__init__.py index bc168ee533..9a899df67f 100644 --- a/pipenv/vendor/requests/__init__.py +++ b/pipenv/vendor/requests/__init__.py @@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version): # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.24 + # urllib3 >= 1.21.1, <= 1.25 assert major == 1 assert minor >= 21 - assert minor <= 24 + assert minor <= 25 # Check chardet for compatibility. major, minor, patch = chardet_version.split('.')[:3] diff --git a/pipenv/vendor/requests/__version__.py b/pipenv/vendor/requests/__version__.py index f5b5d03671..9844f740ab 100644 --- a/pipenv/vendor/requests/__version__.py +++ b/pipenv/vendor/requests/__version__.py @@ -5,10 +5,10 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' __url__ = 'http://python-requests.org' -__version__ = '2.21.0' -__build__ = 0x022100 +__version__ = '2.22.0' +__build__ = 0x022200 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2018 Kenneth Reitz' +__copyright__ = 'Copyright 2019 Kenneth Reitz' __cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/pipenv/vendor/requests/api.py b/pipenv/vendor/requests/api.py index abada96d46..ef71d0759e 100644 --- a/pipenv/vendor/requests/api.py +++ b/pipenv/vendor/requests/api.py @@ -19,7 +19,7 @@ def request(method, url, **kwargs): :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. + in the query string for the :class:`Request`. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. @@ -65,7 +65,7 @@ def get(url, params=None, **kwargs): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send - in the body of the :class:`Request`. + in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. 
:return: :class:`Response <Response>` object
    :rtype: requests.Response
diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py
index a728058a9b..7f039c0751 100644
--- a/pipenv/vendor/requirementslib/__init__.py
+++ b/pipenv/vendor/requirementslib/__init__.py
@@ -10,7 +10,7 @@
 from .models.pipfile import Pipfile
 from .models.requirements import Requirement
 
-__version__ = "1.5.0"
+__version__ = "1.5.1"
 
 
 logger = logging.getLogger(__name__)
diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt
index c5ce4c3f6f..c1342aa942 100644
--- a/pipenv/vendor/vendor.txt
+++ b/pipenv/vendor/vendor.txt
@@ -22,7 +22,7 @@ pipreqs==0.4.9
 docopt==0.6.2
 yarg==0.1.9
 pythonfinder==1.2.1
-requests==2.21.0
+requests==2.22.0
 chardet==3.0.4
 idna==2.8
 urllib3==1.25.2
diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py
index 6ad359047d..821ea29b5a 100644
--- a/pipenv/vendor/vistir/__init__.py
+++ b/pipenv/vendor/vistir/__init__.py
@@ -36,7 +36,7 @@
 from .path import create_tracked_tempdir, create_tracked_tempfile, mkdir_p, rmtree
 from .spin import create_spinner
 
-__version__ = "0.4.1"
+__version__ = "0.4.2"
 
 
 __all__ = [
diff --git a/pipenv/vendor/vistir/_winconsole.py b/pipenv/vendor/vistir/_winconsole.py
index 22eea2cd94..a29c22d8fa 100644
--- a/pipenv/vendor/vistir/_winconsole.py
+++ b/pipenv/vendor/vistir/_winconsole.py
@@ -39,31 +39,35 @@
 # the entire interpreter but just work in our little world of
 # echo and prmopt.
 
+import ctypes
 import io
 import os
 import sys
-import zlib
 import time
-import ctypes
-import msvcrt
+import zlib
 from ctypes import (
-    byref,
     POINTER,
-    c_int,
+    WINFUNCTYPE,
+    Structure,
+    byref,
     c_char,
     c_char_p,
-    c_void_p,
+    c_int,
     c_ssize_t,
     c_ulong,
+    c_void_p,
+    create_unicode_buffer,
     py_object,
-    Structure,
     windll,
-    WINFUNCTYPE,
 )
-from ctypes.wintypes import LPWSTR, LPCWSTR
+from ctypes.wintypes import LPCWSTR, LPWSTR
 from itertools import count
+
+import msvcrt
 from six import PY2, text_type
-from .misc import StreamWrapper, run
+
+from .compat import IS_TYPE_CHECKING
+from .misc import StreamWrapper, run, to_text
 
 try:
     from ctypes import pythonapi
@@ -74,6 +78,10 @@
     pythonapi = None
 
 
+if IS_TYPE_CHECKING:
+    from typing import Text
+
+
 c_ssize_p = POINTER(c_ssize_t)
 
 kernel32 = windll.kernel32
@@ -155,6 +163,15 @@ def get_buffer(obj, writable=False):
         PyBuffer_Release(byref(buf))
 
 
+def get_long_path(short_path):
+    # type: (Text, str) -> Text
+    BUFFER_SIZE = 500
+    buffer = create_unicode_buffer(BUFFER_SIZE)
+    get_long_path_name = windll.kernel32.GetLongPathNameW
+    get_long_path_name(to_text(short_path), buffer, BUFFER_SIZE)
+    return buffer.value
+
+
 class _WindowsConsoleRawIOBase(io.RawIOBase):
     def __init__(self, handle):
         self.handle = handle
@@ -232,6 +249,10 @@ def __init__(self, text_stream, byte_stream):
     def name(self):
         return self.buffer.name
 
+    @property
+    def fileno(self):
+        return self.buffer.fileno
+
     def write(self, x):
         if isinstance(x, text_type):
             return self._text_stream.write(x)
diff --git a/pipenv/vendor/vistir/misc.py b/pipenv/vendor/vistir/misc.py
index b2df8f977d..8d58aad63e 100644
--- a/pipenv/vendor/vistir/misc.py
+++ b/pipenv/vendor/vistir/misc.py
@@ -741,14 +741,16 @@ def write(self, x):
 
     def write(self, x):
         # try to use backslash and surrogate escape strategies before failing
-        old_errors = getattr(self, "_errors", self.errors)
         self._errors = (
            "backslashescape" if self.encoding != "mbcs" else "surrogateescape"
         )
         try:
             return io.TextIOWrapper.write(self, to_text(x, errors=self._errors))
         except
UnicodeDecodeError: - self._errors = old_errors + if self._errors != "surrogateescape": + self._errors = "surrogateescape" + else: + self._errors = "replace" return io.TextIOWrapper.write(self, to_text(x, errors=self._errors)) def writelines(self, lines): @@ -841,6 +843,9 @@ def _isatty(stream): if colorama is not None: + def _is_wrapped_for_color(stream): + return isinstance(stream, (colorama.AnsiToWin32, colorama.ansitowin32.StreamWrapper)) + def _wrap_for_color(stream, color=None): try: cached = _color_stream_cache.get(stream) @@ -911,6 +916,8 @@ def get_text_stream(stream="stdout", encoding=None): sys_stream = stream_map[stream] windows_console = _get_windows_console_stream(sys_stream, encoding, None) if windows_console is not None: + if _can_use_color(windows_console): + return _wrap_for_color(windows_console) return windows_console return get_wrapped_stream(sys_stream, encoding) @@ -927,6 +934,11 @@ def get_text_stdin(): return get_text_stream("stdin") +_text_stdin = _cached_stream_lookup(lambda: sys.stdin, get_text_stdin) +_text_stdout = _cached_stream_lookup(lambda: sys.stdout, get_text_stdout) +_text_stderr = _cached_stream_lookup(lambda: sys.stderr, get_text_stderr) + + TEXT_STREAMS = { "stdin": get_text_stdin, "stdout": get_text_stdout, @@ -934,11 +946,6 @@ def get_text_stdin(): } -_text_stdin = _cached_stream_lookup(lambda: sys.stdin, get_text_stdin) -_text_stdout = _cached_stream_lookup(lambda: sys.stdout, get_text_stdout) -_text_stderr = _cached_stream_lookup(lambda: sys.stderr, get_text_stderr) - - def replace_with_text_stream(stream_name): """Given a stream name, replace the target stream with a text-converted equivalent @@ -1009,7 +1016,7 @@ def echo(text, fg=None, bg=None, style=None, file=None, err=False, color=None): text = colorize(text, fg=fg, bg=bg, attrs=style) if not can_use_color or (os.name == "nt" and not _wrap_for_color): text = ANSI_REMOVAL_RE.sub("", text) - elif os.name == "nt" and _wrap_for_color: + elif os.name == "nt" and _wrap_for_color and not _is_wrapped_for_color(file): file = _wrap_for_color(file, color=color) if text: file.write(text) diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 76bdf7869e..d5b02f641b 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -103,11 +103,13 @@ def normalize_path(path): :rtype: str """ - return os.path.normpath( - os.path.normcase( - os.path.abspath(os.path.expandvars(os.path.expanduser(str(path)))) - ) - ) + path = os.path.abspath(os.path.expandvars(os.path.expanduser(str(path)))) + if os.name == "nt" and os.path.exists(path): + from ._winconsole import get_long_path + + path = get_long_path(path) + + return os.path.normpath(os.path.normcase(path)) def is_in_path(path, parent): @@ -345,18 +347,18 @@ def set_write_bit(fn): from .misc import run if user_sid: - _, err = run( + c = run( [ icacls_exe, + "''{0}''".format(fn), "/grant", "{0}:WD".format(user_sid), - "''{0}''".format(fn), "/T", "/C", "/Q", - ] + ], nospin=True, return_object=True ) - if not err: + if not c.err and c.returncode == 0: return if not os.path.isdir(fn): diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index aa0a15df79..edb64bc8b6 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -635,7 +635,7 @@ def license_destination(vendor_dir, libname, filename): return ( vendor_dir / override.parent ) / '{0}.{1}'.format(override.name, filename) - license_path = LIBRARY_DIRNAMES[libname] / filename + license_path = Path(LIBRARY_DIRNAMES[libname]) / 
filename
     if license_path.as_posix() in LICENSE_RENAMES:
         return vendor_dir / LICENSE_RENAMES[license_path.as_posix()]
     return vendor_dir / LIBRARY_DIRNAMES[libname] / filename
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 8848c05233..8681f76d12 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -12,6 +12,7 @@
 import pytest
 
 from vistir.compat import ResourceWarning, fs_str, fs_encode, FileNotFoundError, PermissionError, TemporaryDirectory
+from vistir.misc import run
 from vistir.contextmanagers import temp_environ
 from vistir.path import mkdir_p, create_tracked_tempdir, handle_remove_readonly
 
@@ -246,6 +247,7 @@ def __init__(
         venv_root=None, ignore_virtualenvs=True, venv_in_project=True, name=None
     ):
         self.pypi = pypi
+        os.environ["PYTHONWARNINGS"] = "ignore:DEPRECATION"
         if ignore_virtualenvs:
             os.environ["PIPENV_IGNORE_VIRTUALENVS"] = fs_str("1")
         if venv_root:
@@ -419,10 +421,17 @@ def __exit__(self, *args, **kwargs):
         os.environ = self._old_environ
 
     def create(self):
-        python = Path(sys.executable).as_posix()
-        cmd = "{0} -m virtualenv {1}".format(python, self.path.as_posix())
-        c = delegator.run(cmd, block=True)
-        assert c.return_code == 0
+        python = Path(sys.executable).absolute().as_posix()
+        cmd = [
+            python, "-m", "virtualenv", self.path.absolute().as_posix()
+        ]
+        c = run(
+            cmd, verbose=False, return_object=True, write_to_stdout=False,
+            combine_stderr=False, block=True, nospin=True,
+        )
+        # cmd = "{0} -m virtualenv {1}".format(python, self.path.as_posix())
+        # c = delegator.run(cmd, block=True)
+        assert c.returncode == 0
 
     def activate(self):
         script_path = "Scripts" if os.name == "nt" else "bin"
@@ -432,7 +441,10 @@ def activate(self):
             code = compile(f.read(), str(activate_this), "exec")
             exec(code, dict(__file__=str(activate_this)))
             os.environ["VIRTUAL_ENV"] = str(self.path)
-            return self.path
+            try:
+                return self.path.absolute().resolve()
+            except OSError:
+                return self.path.absolute()
         else:
             raise VirtualenvActivationException("Can't find the activate_this.py script.")
diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py
index f9c02ee993..4adf46ab88 100644
--- a/tests/integration/test_project.py
+++ b/tests/integration/test_project.py
@@ -9,6 +9,7 @@
 from pipenv.patched import pipfile
 from pipenv.project import Project
 from pipenv.utils import temp_environ
+from pipenv.vendor.vistir.path import is_in_path
 import pipenv.environments
 
 
@@ -184,7 +185,7 @@ def test_run_in_virtualenv_with_global_context(PipenvInstance, pypi, virtualenv)
     assert c.return_code == 0
     c = p.pipenv('run python -c "import click;print(click.__file__)"')
     assert c.return_code == 0
-    assert c.out.strip().startswith(str(virtualenv))
+    assert is_in_path(c.out.strip(), str(virtualenv))
     c = p.pipenv("clean --dry-run")
     assert c.return_code == 0
     assert "click" in c.out
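Review note on the test change just above: asserting `c.out.strip().startswith(str(virtualenv))` is fragile on Windows, where the interpreter may report an 8.3 short path or a differently-cased drive letter for the same location, so the assertion now routes through `vistir.path.is_in_path` and compares normalized paths. A rough standalone equivalent, under the assumption that vistir's check is normalization followed by a prefix test (the real `normalize_path`, patched earlier in this commit, additionally resolves short names through `GetLongPathNameW` on Windows):

```python
import os


def normalize_path(path):
    # Expand user/env references, absolutize, then collapse case and
    # separators, mirroring vistir.path.normalize_path.
    expanded = os.path.expandvars(os.path.expanduser(str(path)))
    return os.path.normpath(os.path.normcase(os.path.abspath(expanded)))


def is_in_path(path, parent):
    # Containment test on normalized forms rather than raw startswith().
    return normalize_path(path).startswith(normalize_path(parent))


print(is_in_path("/tmp/venv/lib/python3.7/site-packages/click/__init__.py", "/tmp/venv"))
```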
From 05651cb95ed5cefb7cdf6a220c3ee21d77b3c503 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sat, 18 May 2019 23:29:53 -0400
Subject: [PATCH 59/81] Add black config and remove unused azure yaml

Signed-off-by: Dan Ryan
---
 .../steps/create-virtualenv-windows.yml      | 36 ------------------
 .azure-pipelines/steps/run-tests-windows.yml | 24 -------------
 pyproject.toml                               | 26 ++++++++++++++
 3 files changed, 26 insertions(+), 60 deletions(-)

diff --git a/.azure-pipelines/steps/create-virtualenv-windows.yml b/.azure-pipelines/steps/create-virtualenv-windows.yml
index 3533820556..b992e2100b 100644
--- a/.azure-pipelines/steps/create-virtualenv-windows.yml
+++ b/.azure-pipelines/steps/create-virtualenv-windows.yml
@@ -24,39 +24,3 @@ steps:
     PYTHONWARNINGS: 'ignore:DEPRECATION'
     PIPENV_NOSPIN: '1'
   displayName: Make Virtualenv
-
-# steps:
-
-# - powershell: |
-#     $env:PY_EXE=$(python -c "import sys; print(sys.executable)")
-#     if (!$env:PY_EXE) {
-#       $env:PY_EXE="python"
-#     }
-#     Write-Host "##vso[task.setvariable variable=PY_EXE]$env:PY_EXE"
-#     Write-Host "Found Python: $env:PY_EXE"
-#     Invoke-Expression "& '$env:PY_EXE' -m virtualenv D:\.venv"
-#     Write-Host "##vso[task.setvariable variable=VIRTUAL_ENV]D:\.venv"
-#     Invoke-Expression "& 'D:\.venv\Scripts\activate.ps1'"
-#     $env:VIRTUAL_ENV="D:\.venv"
-#     Write-Host "Installing local package..."
-#     Invoke-Expression "& '$env:PY_EXE' -m pip install -e .[tests] requests --upgrade"
-#     Write-Host "upgrading local package in virtual env"
-#     $venv_scripts = Join-Path -path D:\.venv -childpath Scripts
-#     $venv_py = Join-Path -path $venv_scripts -childpath python.exe
-#     Write-Host "##vso[task.setvariable variable=VIRTUAL_ENV_PY]$venv_py"
-#     Invoke-Expression "& '$venv_py' -m pip install -e .[tests] requests --upgrade" 2>&1
-#     Write-Host "Installing pipenv development packages"
-#     Invoke-Expression "& '$venv_py' -m pipenv install --dev" 2>&1
-#     # Write-Host "Installing local package in pipenv environment"
-#     # Invoke-Expression "& '$venv_py' -m pipenv run pip install -e .[tests] requests" 2>&1
-#     # Write-Host "Printing metadata"
-#     # Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv --venv" 2>&1)
-#     # Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv --py" 2>&1)
-#     # Write-Host $(Invoke-Expression "& '$venv_py' -m pipenv run python --version" 2>&1)
-#   displayName: Make Virtualenv
-#   failOnStderr: false
-#   env:
-#     PIPENV_DEFAULT_PYTHON_VERSION: $(PIPENV_DEFAULT_PYTHON_VERSION)
-#     PYTHONWARNINGS: 'ignore:DEPRECATION'
-#     PIPENV_VERBOSITY: '-1'
-#     PIPENV_NOSPIN: '1'
diff --git a/.azure-pipelines/steps/run-tests-windows.yml b/.azure-pipelines/steps/run-tests-windows.yml
index 7778f7c5a5..db782c01af 100644
--- a/.azure-pipelines/steps/run-tests-windows.yml
+++ b/.azure-pipelines/steps/run-tests-windows.yml
@@ -13,27 +13,3 @@ steps:
     PIPENV_DEFAULT_PYTHON_VERSION: $(PIPENV_DEFAULT_PYTHON_VERSION)
     PYTHONWARNINGS: 'ignore:DEPRECATION'
     PIPENV_NOSPIN: '1'
-
-# - powershell: |
-#     # Fix Git SSL errors
-#     Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pip install certifi"
-#     Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m certifi > cacert.txt"
-#     Write-Host "##vso[task.setvariable variable=GIT_SSL_CAINFO]$(Get-Content cacert.txt)"
-#     $env:GIT_SSL_CAINFO="$(Get-Content cacert.txt)"
-#     # Shorten paths to get under MAX_PATH or else integration tests will fail
-#     # https://bugs.python.org/issue18199
-#     subst T: "$env:TEMP"
-#     Write-Host "##vso[task.setvariable variable=TEMP]T:\"
-#     $env:TEMP='T:\'
-#     Write-Host "##vso[task.setvariable variable=TMP]T:\"
-#     $env:TMP='T:\'
-#     Invoke-Expression "git submodule sync -q" 2>&1
-#     Invoke-Expression "git submodule update --init --recursive -q" 2>&1
-#     Invoke-Expression "& '$env:VIRTUAL_ENV_PY' -m pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests"
-#   displayName: Run integration tests
-#   failOnStderr: false
-#   env:
-#     VIRTUAL_ENV: $(VIRTUAL_ENV)
-#     VIRTUAL_ENV_PY: $(VIRTUAL_ENV_PY)
-#     PYTHONWARNINGS: 'ignore:DEPRECATION'
-#     PIPENV_VERBOSITY: '-1'
diff --git a/pyproject.toml b/pyproject.toml
index a799764c8d..9929dac75c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,32 @@
 [build-system]
 requires = ["setuptools", "wheel"]
 
+[tool.black]
+line-length = 90
+include = '\.pyi?$'
+exclude = '''
+/(
+    \.eggs
+  | \.git
+  | \.github
+  | \.hg
+  | \.mypy_cache
+  | \.tox
+  | \.pyre_configuration
+  | \.venv
+  | _build
+  | buck-out
+  | build
+  | dist
+  | pipenv/vendor
+  | pipenv/patched
+  | tests/pypi
+  | tests/pytest-pypi
+  | tests/test_artifacts
+  | get-pipenv.py
+)
+'''
+
 [tool.towncrier]
 package = "pipenv"
 filename = "CHANGELOG.rst"

From 86192bb6012bb5fed04e6a0a90bb73dfb42d6d03 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 19 May 2019 00:26:46 -0400
Subject: [PATCH 60/81] make sure we publish test results from test runs

Signed-off-by: Dan Ryan
---
 .azure-pipelines/jobs/run-tests-windows.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/.azure-pipelines/jobs/run-tests-windows.yml b/.azure-pipelines/jobs/run-tests-windows.yml
index 05573b107e..8640683bfa 100644
--- a/.azure-pipelines/jobs/run-tests-windows.yml
+++ b/.azure-pipelines/jobs/run-tests-windows.yml
@@ -16,3 +16,10 @@ steps:
 - template: ../steps/create-virtualenv-windows.yml
 
 - template: ../steps/run-tests-windows.yml
+
+- task: PublishTestResults@2
+  displayName: Publish Test Results
+  inputs:
+    testResultsFiles: '**/test-results.xml'
+    testRunTitle: 'Python $(python.version)'
+  condition: succeededOrFailed()

From 43d61d824241a37de9255906a7889e6e559a9d19 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 19 May 2019 02:32:50 -0400
Subject: [PATCH 61/81] Update requirementslib with retry for failed wheels

- Fixes #3692

Signed-off-by: Dan Ryan
---
 pipenv/vendor/requirementslib/models/setup_info.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py
index 872ae4caa4..b3251f71b2 100644
--- a/pipenv/vendor/requirementslib/models/setup_info.py
+++ b/pipenv/vendor/requirementslib/models/setup_info.py
@@ -1102,8 +1102,12 @@ def build_sdist(self):
     def build(self):
         # type: () -> "SetupInfo"
         dist_path = None
+        metadata = None
         try:
             dist_path = self.build_wheel()
+            metadata = self.get_metadata_from_wheel(
+                os.path.join(self.extra_kwargs["build_dir"], dist_path)
+            )
         except Exception:
             try:
                 dist_path = self.build_sdist()
@@ -1112,12 +1116,8 @@ def build(self):
                     self.populate_metadata(metadata)
             except Exception:
                 pass
-        else:
-            metadata = self.get_metadata_from_wheel(
-                os.path.join(self.extra_kwargs["build_dir"], dist_path)
-            )
-            if metadata:
-                self.populate_metadata(metadata)
+        if metadata:
+            self.populate_metadata(metadata)
         if not self.metadata or not self.name:
             metadata = self.get_egg_metadata()
             if metadata:

From b06c07ca72602b5d2ef34554798ffe64b7f8a2cc Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Sun, 19 May 2019 18:56:40 -0400
Subject: [PATCH 62/81] Update vendoring script

Signed-off-by: Dan Ryan
---
 pipenv/vendor/vendor.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt
index c1342aa942..61d59e7c1f 100644
--- a/pipenv/vendor/vendor.txt
+++ b/pipenv/vendor/vendor.txt
@@ -27,7 +27,7 @@ requests==2.22.0
 idna==2.8
 urllib3==1.25.2
 certifi==2019.3.9
-requirementslib==1.5.0
+requirementslib==1.5.1
 attrs==19.1.0
 distlib==0.2.9
 packaging==19.0
@@ -40,7 +40,7 @@ semver==2.8.1
 shutilwhich==1.1.0
 toml==0.10.0
 cached-property==1.5.1
-vistir==0.4.1
+vistir==0.4.2
 pip-shims==0.3.2
 enum34==1.1.6
 yaspin==0.14.3
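Review note on PATCH 61: the crux is that extracting metadata from a freshly built wheel can itself throw, so the lookup now lives inside the same try-block as the wheel build, and the sdist/egg-info path doubles as the retry. A toy model of that control flow; the class and builder bodies are invented for illustration, only the try/except shape mirrors the diff:

```python
class WheelThenSdistBuilder(object):
    # Toy stand-in for requirementslib's SetupInfo; the builders below just
    # simulate success or failure.
    def __init__(self, wheel_fails=False):
        self.wheel_fails = wheel_fails

    def build_wheel(self):
        if self.wheel_fails:
            raise RuntimeError("wheel build failed")
        return "pkg-1.0-py2.py3-none-any.whl"

    def get_metadata_from_wheel(self, dist_path):
        return {"name": "pkg", "version": "1.0", "source": dist_path}

    def build_sdist(self):
        return "pkg-1.0.tar.gz"

    def get_egg_metadata(self):
        return {"name": "pkg", "version": "1.0", "source": "egg-info"}

    def build(self):
        # Key point of the patch: metadata extraction happens inside the same
        # try-block as the wheel build, so a failure in either step falls back
        # to the sdist/egg-info path instead of raising (fixes pipenv#3692).
        metadata = None
        try:
            dist_path = self.build_wheel()
            metadata = self.get_metadata_from_wheel(dist_path)
        except Exception:
            try:
                self.build_sdist()
                metadata = self.get_egg_metadata()
            except Exception:
                pass
        return metadata


print(WheelThenSdistBuilder().build()["source"])                  # wheel metadata
print(WheelThenSdistBuilder(wheel_fails=True).build()["source"])  # egg-info fallback
```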
From e6b2f6463a905268641158564a23ccef0251586e Mon Sep 17 00:00:00 2001
From: frostming
Date: Mon, 20 May 2019 12:45:09 +0800
Subject: [PATCH 63/81] add news entry

---
 news/3745.bugfix.rst          | 1 +
 tests/integration/test_cli.py | 1 +
 2 files changed, 2 insertions(+)
 create mode 100644 news/3745.bugfix.rst

diff --git a/news/3745.bugfix.rst b/news/3745.bugfix.rst
new file mode 100644
index 0000000000..229047a40d
--- /dev/null
+++ b/news/3745.bugfix.rst
@@ -0,0 +1 @@
+Normalize the package names to lowercase when comparing used and in-Pipfile packages.
diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py
index 0af5bedb8e..27314b7354 100644
--- a/tests/integration/test_cli.py
+++ b/tests/integration/test_cli.py
@@ -217,6 +217,7 @@ def test_install_parse_error(PipenvInstance, pypi):
 @pytest.mark.code
 @pytest.mark.check
 @pytest.mark.unused
+@pytest.mark.needs_internet(reason='required by check')
 def test_check_unused(PipenvInstance, pypi):
     with PipenvInstance(chdir=True, pypi=pypi) as p:
         with open('__init__.py', 'w') as f:

From 2dce8355e6ae3df727a35ddf95198de1bcb0a2b1 Mon Sep 17 00:00:00 2001
From: frostming
Date: Mon, 20 May 2019 14:28:02 +0800
Subject: [PATCH 64/81] let's see what's happening

---
 pipenv/core.py                   | 3 +++
 pipenv/vendor/pipreqs/pipreqs.py | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/pipenv/core.py b/pipenv/core.py
index 3e62b19ca6..f90c60d1a0 100644
--- a/pipenv/core.py
+++ b/pipenv/core.py
@@ -243,12 +243,15 @@ def import_from_code(path="."):
 
     rs = []
     try:
         for r in pipreqs.get_all_imports(path, encoding="utf-8"):
+            click.echo(r)
             if r not in BAD_PACKAGES:
                 rs.append(r)
         pkg_names = pipreqs.get_pkg_names(rs)
         return [proper_case(r) for r in pkg_names]
 
     except Exception:
+        import traceback
+        traceback.print_exc()
         return []
diff --git a/pipenv/vendor/pipreqs/pipreqs.py b/pipenv/vendor/pipreqs/pipreqs.py
index 791168a99d..c0466adf52 100644
--- a/pipenv/vendor/pipreqs/pipreqs.py
+++ b/pipenv/vendor/pipreqs/pipreqs.py
@@ -68,7 +68,7 @@ def get_all_imports(path, encoding=None, extra_ignore_dirs=None):
             candidates.append(os.path.basename(root))
 
         files = [fn for fn in files if os.path.splitext(fn)[1] == ".py"]
-
+        print(root, files)
         candidates += [os.path.splitext(fn)[0] for fn in files]
         for file_name in files:
             with open_func(os.path.join(root, file_name), "r", encoding=encoding) as f:

From 457a42a69c990f8174c9c0848ed2f1a2b1a2b429 Mon Sep 17 00:00:00 2001
From: frostming
Date: Mon, 20 May 2019 16:04:58 +0800
Subject: [PATCH 65/81] exclude venv folders

---
 tests/integration/test_cli.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py
index 27314b7354..ae5db17638 100644
--- a/tests/integration/test_cli.py
+++ b/tests/integration/test_cli.py
@@ -220,7 +220,8 @@ def test_install_parse_error(PipenvInstance, pypi):
 @pytest.mark.needs_internet(reason='required by check')
 def test_check_unused(PipenvInstance, pypi):
     with PipenvInstance(chdir=True, pypi=pypi) as p:
-        with open('__init__.py', 'w') as f:
+        os.makedirs('mypackage')
+        with open('mypackage/__init__.py', 'w') as f:
             contents = """
 import tablib
 import records
@@ -233,6 +234,6 @@ def test_check_unused(PipenvInstance, pypi):
 
         assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'tablib', 'flask'])
 
-        c = p.pipenv('check --unused .')
+        c = p.pipenv('check --unused mypackage')
         assert 'tablib' not in c.out
         assert 'flask' not in c.out

From bc37beaef43de351a1a215bea1c6fad678774233 Mon Sep 17 00:00:00 2001
From: frostming
Date: Mon, 20 May 2019 16:42:18 +0800
Subject: [PATCH 66/81] cleanup

---
 pipenv/core.py                   | 7 +++----
 pipenv/vendor/pipreqs/pipreqs.py | 2 +-
tests/integration/test_cli.py | 9 +++------ 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/pipenv/core.py b/pipenv/core.py index f90c60d1a0..7f04ac4999 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -242,16 +242,15 @@ def import_from_code(path="."): rs = [] try: - for r in pipreqs.get_all_imports(path, encoding="utf-8"): - click.echo(r) + for r in pipreqs.get_all_imports( + path, encoding="utf-8", extra_ignore_dirs=[".venv"] + ): if r not in BAD_PACKAGES: rs.append(r) pkg_names = pipreqs.get_pkg_names(rs) return [proper_case(r) for r in pkg_names] except Exception: - import traceback - traceback.print_exc() return [] diff --git a/pipenv/vendor/pipreqs/pipreqs.py b/pipenv/vendor/pipreqs/pipreqs.py index c0466adf52..791168a99d 100644 --- a/pipenv/vendor/pipreqs/pipreqs.py +++ b/pipenv/vendor/pipreqs/pipreqs.py @@ -68,7 +68,7 @@ def get_all_imports(path, encoding=None, extra_ignore_dirs=None): candidates.append(os.path.basename(root)) files = [fn for fn in files if os.path.splitext(fn)[1] == ".py"] - print(root, files) + candidates += [os.path.splitext(fn)[0] for fn in files] for file_name in files: with open_func(os.path.join(root, file_name), "r", encoding=encoding) as f: diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index ae5db17638..f131ae4440 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -220,20 +220,17 @@ def test_install_parse_error(PipenvInstance, pypi): @pytest.mark.needs_internet(reason='required by check') def test_check_unused(PipenvInstance, pypi): with PipenvInstance(chdir=True, pypi=pypi) as p: - os.makedirs('mypackage') - with open('mypackage/__init__.py', 'w') as f: + with open('__init__.py', 'w') as f: contents = """ import tablib import records import flask """.strip() f.write(contents) - p.pipenv('install requests') - p.pipenv('install tablib') - p.pipenv('install flask') + p.pipenv('install requests tablib flask') assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'tablib', 'flask']) - c = p.pipenv('check --unused mypackage') + c = p.pipenv('check --unused .') assert 'tablib' not in c.out assert 'flask' not in c.out From 35b2101770daa21b12f37768d03dc08ab85be443 Mon Sep 17 00:00:00 2001 From: Cologler <10906962+Cologler@users.noreply.github.com> Date: Mon, 20 May 2019 18:45:28 +0800 Subject: [PATCH 67/81] Update core.py #3694 --- pipenv/core.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pipenv/core.py b/pipenv/core.py index 7f04ac4999..425c784ae2 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1775,6 +1775,13 @@ def ensure_lockfile(keep_outdated=False, pypi_mirror=None): def do_py(system=False): + if not project.virtualenv_exists: + click.echo( + crayons.red("location not created nor specified"), + err=True, + ) + return + try: click.echo(which("python", allow_global=system)) except AttributeError: From a3556e9d7fa4cfad4013d022efa8d33e9026cd05 Mon Sep 17 00:00:00 2001 From: Cologler <10906962+Cologler@users.noreply.github.com> Date: Mon, 20 May 2019 22:40:16 +0800 Subject: [PATCH 68/81] Update core.py --- pipenv/core.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pipenv/core.py b/pipenv/core.py index 425c784ae2..2ba8af7f7b 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1777,7 +1777,11 @@ def ensure_lockfile(keep_outdated=False, pypi_mirror=None): def do_py(system=False): if not project.virtualenv_exists: click.echo( - crayons.red("location not created nor specified"), + "{}({}){}".format( + crayons.red("No virtualenv has 
been created for this project "), + crayons.white(project.project_directory, bold=True), + crayons.red(" yet!") + ), err=True, ) return From 6a80d3674f0edea342d6ad63f17ebd160f55d2e2 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 26 May 2019 17:16:52 -0400 Subject: [PATCH 69/81] Add news entry for PR 3684 --- news/3684.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/3684.trivial.rst diff --git a/news/3684.trivial.rst b/news/3684.trivial.rst new file mode 100644 index 0000000000..64561ec747 --- /dev/null +++ b/news/3684.trivial.rst @@ -0,0 +1 @@ +Cleaned up some conditional logic that would always evaluate ``True``. From e79a9568a8cc220f193b0bd3e202ec297c883a65 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Sun, 26 May 2019 17:21:54 -0400 Subject: [PATCH 70/81] Add news entry for PR 3479 --- news/3479.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/3479.bugfix.rst diff --git a/news/3479.bugfix.rst b/news/3479.bugfix.rst new file mode 100644 index 0000000000..15e8e0f652 --- /dev/null +++ b/news/3479.bugfix.rst @@ -0,0 +1 @@ +Fixed an issue which caused ``pipenv install --help`` to show duplicate entries for ``--pre``. From 78029a6d134683e79275255b4acc7c9c9db93a7b Mon Sep 17 00:00:00 2001 From: frostming Date: Mon, 27 May 2019 08:53:51 +0800 Subject: [PATCH 71/81] news entry --- news/3753.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/3753.trivial.rst diff --git a/news/3753.trivial.rst b/news/3753.trivial.rst new file mode 100644 index 0000000000..2ab71d388f --- /dev/null +++ b/news/3753.trivial.rst @@ -0,0 +1 @@ +Improve the error message of ``pipenv --py`` when virtualenv can't be found. From 71478d1b55c55ff28fdcff6b8582e222a0017d22 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 May 2019 00:00:23 -0400 Subject: [PATCH 72/81] Fix keyerror in keep_outdated when using VCS dependencies Signed-off-by: Dan Ryan --- pipenv/resolver.py | 11 +-- .../requirementslib/models/setup_info.py | 91 +++++++++++++++++-- setup.py | 2 +- 3 files changed, 85 insertions(+), 19 deletions(-) diff --git a/pipenv/resolver.py b/pipenv/resolver.py index ca42b44cbc..c4f02607c5 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -237,7 +237,8 @@ def get_cleaned_dict(self, keep_outdated=False): if entry_hashes != locked_hashes and not self.is_updated: self.entry_dict["hashes"] = list(entry_hashes | locked_hashes) self.entry_dict["name"] = self.name - self.entry_dict["version"] = self.strip_version(self.entry_dict["version"]) + if "version" in self.entry_dict: + self.entry_dict["version"] = self.strip_version(self.entry_dict["version"]) _, self.entry_dict = self.get_markers_from_dict(self.entry_dict) return self.entry_dict @@ -779,14 +780,6 @@ def main(): warnings.simplefilter("ignore", category=ResourceWarning) replace_with_text_stream("stdout") replace_with_text_stream("stderr") - # from pipenv.vendor import colorama - # if os.name == "nt" and ( - # all(getattr(stream, method, None) for stream in [sys.stdout, sys.stderr] for method in ["write", "isatty"]) and - # all(stream.isatty() for stream in [sys.stdout, sys.stderr]) - # ): - # colorama.init(wrap=False) - # elif os.name != "nt": - # colorama.init() os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = str("1") os.environ["PYTHONIOENCODING"] = str("utf-8") os.environ["PYTHONUNBUFFERED"] = str("1") diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index b3251f71b2..8b41ecac18 100644 --- 
a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -160,6 +160,19 @@ def parse_special_directives(setup_entry, package_dir=None): sys.path.insert(0, package_dir) if "." in resource: resource, _, attribute = resource.rpartition(".") + package, _, path = resource.partition(".") + base_path = os.path.join(package_dir, package) + if path: + path = os.path.join(base_path, os.path.join(*path.split("."))) + else: + path = base_path + if not os.path.exists(path) and os.path.exists("{0}.py".format(path)): + path = "{0}.py".format(path) + elif os.path.isdir(path): + path = os.path.join(path, "__init__.py") + rv = ast_parse_attribute_from_file(path, attribute) + if rv: + return str(rv) module = importlib.import_module(resource) rv = getattr(module, attribute) if not isinstance(rv, six.string_types): @@ -203,10 +216,10 @@ def setuptools_parse_setup_cfg(path): def get_package_dir_from_setupcfg(parser, base_dir=None): # type: (configparser.ConfigParser, STRING_TYPE) -> Text - if not base_dir: - package_dir = os.getcwd() - else: + if base_dir is not None: package_dir = base_dir + else: + package_dir = os.getcwd() if parser.has_option("options", "packages.find"): pkg_dir = parser.get("options", "packages.find") if isinstance(package_dir, Mapping): @@ -217,6 +230,15 @@ def get_package_dir_from_setupcfg(parser, base_dir=None): _, pkg_dir = pkg_dir.split("find:") pkg_dir = pkg_dir.strip() package_dir = os.path.join(package_dir, pkg_dir) + elif os.path.exists(os.path.join(package_dir, "setup.py")): + setup_py = ast_parse_setup_py(os.path.join(package_dir, "setup.py")) + if "package_dir" in setup_py: + package_lookup = setup_py["package_dir"] + if not isinstance(package_lookup, Mapping): + return package_lookup + return package_lookup.get( + next(iter(list(package_lookup.keys()))), package_dir + ) return package_dir @@ -665,13 +687,31 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no unparsed = item elif six.PY3 and isinstance(item, ast.NameConstant): unparsed = item.value + elif isinstance(item, ast.Attribute): + attr_name = getattr(item, "value", None) + attr_attr = getattr(item, "attr", None) + name = unparse(attr_name) if attr_name is not None else attr_attr + if initial_mapping: + unparsed = item + elif name and attr_attr: + if not initial_mapping and isinstance(name, six.string_types): + unparsed = ".".join([item for item in (name, attr_attr) if item]) + else: + unparsed = item + elif attr_attr and not name: + unparsed = attr_attr + else: + unparsed = name elif isinstance(item, ast.Call): unparsed = {} if isinstance(item.func, ast.Name): - name = unparse(item.func) - unparsed[name] = {} + func_name = unparse(item.func) + elif isinstance(item.func, ast.Attribute): + func_name = unparse(item.func) + if func_name: + unparsed[func_name] = {} for keyword in item.keywords: - unparsed[name].update(unparse(keyword)) + unparsed[func_name].update(unparse(keyword)) elif isinstance(item, ast.keyword): unparsed = {unparse(item.arg): unparse(item.value)} elif isinstance(item, ast.Assign): @@ -704,15 +744,48 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no return unparsed -def ast_parse_setup_py(path): - # type: (S) -> Dict[Any, Any] +def ast_parse_attribute_from_file(path, attribute): + # type: (S) -> Any + analyzer = ast_parse_file(path) + target_value = None + for k, v in analyzer.assignments.items(): + name = "" + if isinstance(k, ast.Name): + name = k.id + elif isinstance(k, 
ast.Attribute): + fn = ast_unparse(k) + if isinstance(fn, six.string_types): + _, _, name = fn.rpartition(".") + if name == attribute: + target_value = ast_unparse(v, analyzer=analyzer) + break + if isinstance(target_value, Mapping) and attribute in target_value: + return target_value[attribute] + return target_value + + +def ast_parse_file(path): + # type: (S) -> Analyzer with open(path, "r") as fh: tree = ast.parse(fh.read()) ast_analyzer = Analyzer() ast_analyzer.visit(tree) + return ast_analyzer + + +def ast_parse_setup_py(path): + # type: (S) -> Dict[Any, Any] + ast_analyzer = ast_parse_file(path) setup = {} # type: Dict[Any, Any] for k, v in ast_analyzer.function_map.items(): - if isinstance(k, ast.Name) and k.id == "setup": + fn_name = "" + if isinstance(k, ast.Name): + fn_name = k.id + elif isinstance(k, ast.Attribute): + fn = ast_unparse(k) + if isinstance(fn, six.string_types): + _, _, fn_name = fn.rpartition(".") + if fn_name == "setup": setup = v cleaned_setup = ast_unparse(setup, analyzer=ast_analyzer) return cleaned_setup diff --git a/setup.py b/setup.py index d0beff70f9..c251e49adb 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ required = [ "pip>=18.0", "certifi", - "setuptools>=41.0.0", + "setuptools>=36.2.1", "virtualenv-clone>=0.2.5", "virtualenv", 'enum34; python_version<"3"', From 3a75696b20340693e3f78910796398b8508618e3 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 May 2019 00:02:22 -0400 Subject: [PATCH 73/81] Add news entry Signed-off-by: Dan Ryan --- news/3768.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/3768.bugfix.rst diff --git a/news/3768.bugfix.rst b/news/3768.bugfix.rst new file mode 100644 index 0000000000..8efe019787 --- /dev/null +++ b/news/3768.bugfix.rst @@ -0,0 +1 @@ +Fixed a ``KeyError`` which could occur when pinning outdated VCS dependencies via ``pipenv lock --keep-outdated``. 
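The ``KeyError`` fixed by PATCH 72 comes from the shape of lockfile entries: a VCS dependency is pinned by ``ref`` rather than ``version``, so any cleanup pass that reads ``entry_dict["version"]`` unconditionally will fail. A minimal sketch of the guarded pattern (illustrative only; the dictionaries below are made-up stand-ins, not pipenv's real ``Entry`` objects)::

    # Stand-in lock entries: a VCS entry pins a "ref", a PyPI entry pins a
    # "version"; real Pipfile.lock entries carry more fields than this.
    vcs_entry = {"git": "https://github.com/requests/requests.git", "ref": "v2.20.1"}
    pypi_entry = {"version": "==2.20.1"}

    def strip_version_prefix(entry):
        # Guard the lookup: VCS entries have no "version" key at all.
        if "version" in entry:
            entry["version"] = entry["version"].lstrip("=")
        return entry

    assert strip_version_prefix(vcs_entry) == vcs_entry            # left untouched
    assert strip_version_prefix(pypi_entry)["version"] == "2.20.1"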
From 51e2be70ca66dceaa3f22406385a673a488f0f35 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 May 2019 02:38:54 -0400 Subject: [PATCH 74/81] Update requirementslib to fix recursion error Signed-off-by: Dan Ryan --- pipenv/utils.py | 6 +++--- .../requirementslib/models/requirements.py | 6 +++--- .../vendor/requirementslib/models/setup_info.py | 16 ++++++++++------ 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/pipenv/utils.py b/pipenv/utils.py index 61d771ce74..b73b7fa737 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -985,8 +985,8 @@ def resolve(cmd, sp): _out = decode_output("{0}\n".format(_out)) out += _out sp.text = to_native_string("{0}".format(_out[:100])) - # if environments.is_verbose(): - # sp.hide_and_write(_out.rstrip()) + if environments.is_verbose(): + sp.hide_and_write(_out.rstrip()) _out = to_native_string("") if not result and not _out: break @@ -2019,7 +2019,7 @@ def find_python(finder, line=None): ) if line and os.path.isabs(line): if os.name == "nt": - line = posixpath.join(*line.split(os.path.sep)) + line = make_posix(line) return line if not finder: from pipenv.vendor.pythonfinder import Finder diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index b8534c697e..559ab424d1 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -814,14 +814,14 @@ def vcsrepo(self): @cached_property def metadata(self): # type: () -> Dict[Any, Any] - if self.is_local and is_installable_dir(self.path): + if self.is_local and self.path and is_installable_dir(self.path): return get_metadata(self.path) return {} @cached_property def parsed_setup_cfg(self): # type: () -> Dict[Any, Any] - if self.is_local and is_installable_dir(self.path): + if self.is_local and self.path and is_installable_dir(self.path): if self.setup_cfg: return parse_setup_cfg(self.setup_cfg) return {} @@ -829,7 +829,7 @@ def parsed_setup_cfg(self): @cached_property def parsed_setup_py(self): # type: () -> Dict[Any, Any] - if self.is_local and is_installable_dir(self.path): + if self.is_local and self.path and is_installable_dir(self.path): if self.setup_py: return ast_parse_setup_py(self.setup_py) return {} diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py index 8b41ecac18..0dd5b3c83e 100644 --- a/pipenv/vendor/requirementslib/models/setup_info.py +++ b/pipenv/vendor/requirementslib/models/setup_info.py @@ -660,7 +660,7 @@ def match_assignment_name(self, match): def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # noqa:C901 # type: (Any, bool, Optional[Analyzer], bool) -> Union[List[Any], Dict[Any, Any], Tuple[Any, ...], STRING_TYPE] - unparse = partial(ast_unparse, initial_mapping=initial_mapping, analyzer=analyzer) + unparse = partial(ast_unparse, initial_mapping=initial_mapping, analyzer=analyzer, recurse=recurse) if isinstance(item, ast.Dict): unparsed = dict(zip(unparse(item.keys), unparse(item.values))) elif isinstance(item, ast.List): @@ -690,18 +690,22 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no elif isinstance(item, ast.Attribute): attr_name = getattr(item, "value", None) attr_attr = getattr(item, "attr", None) - name = unparse(attr_name) if attr_name is not None else attr_attr + name = None if initial_mapping: unparsed = item - elif name and attr_attr: + elif attr_name and not recurse: + name = attr_name + else: + 
name = unparse(attr_name) if attr_name is not None else attr_attr + if name and attr_attr: if not initial_mapping and isinstance(name, six.string_types): unparsed = ".".join([item for item in (name, attr_attr) if item]) else: unparsed = item - elif attr_attr and not name: + elif attr_attr and not name and not initial_mapping: unparsed = attr_attr else: - unparsed = name + unparsed = name if not unparsed else unparsed elif isinstance(item, ast.Call): unparsed = {} if isinstance(item.func, ast.Name): @@ -721,7 +725,7 @@ def ast_unparse(item, initial_mapping=False, analyzer=None, recurse=True): # no # XXX: Original reference if not initial_mapping: target = unparse(next(iter(item.targets)), recurse=False) - val = unparse(item.value) + val = unparse(item.value, recurse=False) if isinstance(target, (tuple, set, list)): unparsed = dict(zip(target, val)) else: From 788194d749f4737d9608807aa081c0213043760f Mon Sep 17 00:00:00 2001 From: Jordan Pittier Date: Thu, 23 May 2019 15:58:45 +0200 Subject: [PATCH 75/81] Doc: advanced.rst: Use "pytest" as the new recommended entrypoint for Pytest According to https://github.com/pytest-dev/pytest/issues/1629 the recommended entrypoint for `pytest` is `pytest`. Usage of `py.test` has been deprecated for 2 years now. --- docs/advanced.rst | 6 +++--- news/3759.doc.rst | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 news/3759.doc.rst diff --git a/docs/advanced.rst b/docs/advanced.rst index 3ac323e659..906544362a 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -467,7 +467,7 @@ and the corresponding Makefile:: pipenv install --dev test: - pipenv run py.test tests + pipenv run pytest tests Tox Automation Project @@ -483,7 +483,7 @@ and external testing:: deps = pipenv commands= pipenv install --dev - pipenv run py.test tests + pipenv run pytest tests [testenv:flake8-py3] basepython = python3.4 @@ -492,7 +492,7 @@ and external testing:: pipenv run flake8 --version pipenv run flake8 setup.py docs project test -Pipenv will automatically use the virtualenv provided by ``tox``. If ``pipenv install --dev`` installs e.g. ``pytest``, then installed command ``py.test`` will be present in given virtualenv and can be called directly by ``py.test tests`` instead of ``pipenv run py.test tests``. +Pipenv will automatically use the virtualenv provided by ``tox``. If ``pipenv install --dev`` installs e.g. ``pytest``, then installed command ``pytest`` will be present in given virtualenv and can be called directly by ``pytest tests`` instead of ``pipenv run pytest tests``. You might also want to add ``--ignore-pipfile`` to ``pipenv install``, as to not accidentally modify the lock-file on each test run. This causes Pipenv diff --git a/news/3759.doc.rst b/news/3759.doc.rst new file mode 100644 index 0000000000..5aebd29e78 --- /dev/null +++ b/news/3759.doc.rst @@ -0,0 +1 @@ +Updated the documentation with the new ``pytest`` entrypoint. 
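The requirementslib changes in PATCH 72 and PATCH 74 above both revolve around walking ``setup.py`` as an AST so metadata can be read without executing the file, now recognizing ``setuptools.setup(...)`` attribute calls as well as bare ``setup(...)``. A stripped-down sketch of that technique (a toy that only handles literal keyword values; the real ``ast_unparse`` resolves many more node types)::

    import ast

    SETUP_PY = """
    import setuptools
    setuptools.setup(name="demo", version="1.0", install_requires=["six"])
    """

    def parse_setup_kwargs(source):
        kwargs = {}
        for node in ast.walk(ast.parse(source)):
            if not isinstance(node, ast.Call):
                continue
            func = node.func
            # Accept both call spellings: setup(...) and setuptools.setup(...)
            name = func.id if isinstance(func, ast.Name) else getattr(func, "attr", "")
            if name == "setup":
                for kw in node.keywords:
                    kwargs[kw.arg] = ast.literal_eval(kw.value)
        return kwargs

    print(parse_setup_kwargs(SETUP_PY))
    # {'name': 'demo', 'version': '1.0', 'install_requires': ['six']}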
From d56a81038387a81d4065dafc57afb512c64902a0 Mon Sep 17 00:00:00 2001 From: Manoj Jadhav Date: Sun, 5 May 2019 08:52:45 +0530 Subject: [PATCH 76/81] fixed broken link for diagnose documentation --- .github/ISSUE_TEMPLATE/Bug_report.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/Bug_report.md b/.github/ISSUE_TEMPLATE/Bug_report.md index c470a86779..ae0bbba031 100644 --- a/.github/ISSUE_TEMPLATE/Bug_report.md +++ b/.github/ISSUE_TEMPLATE/Bug_report.md @@ -5,7 +5,7 @@ about: Create a report to help us improve Be sure to check the existing issues (both open and closed!), and make sure you are running the latest version of Pipenv. -Check the [diagnose documentation](https://docs.pipenv.org/diagnose/) for common issues before posting! We may close your issue if it is very similar to one of them. Please be considerate, or be on your way. +Check the [diagnose documentation](/docs/diagnose.rst) for common issues before posting! We may close your issue if it is very similar to one of them. Please be considerate, or be on your way. Make sure to mention your debugging experience if the documented solution failed. From d841dd54ec59de21dbffce1c0f32202e4046baee Mon Sep 17 00:00:00 2001 From: Manoj Jadhav Date: Mon, 20 May 2019 09:10:29 +0530 Subject: [PATCH 77/81] Updated Diagnose URL --- .github/ISSUE_TEMPLATE/Bug_report.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/Bug_report.md b/.github/ISSUE_TEMPLATE/Bug_report.md index ae0bbba031..3dc4613eb0 100644 --- a/.github/ISSUE_TEMPLATE/Bug_report.md +++ b/.github/ISSUE_TEMPLATE/Bug_report.md @@ -5,7 +5,7 @@ about: Create a report to help us improve Be sure to check the existing issues (both open and closed!), and make sure you are running the latest version of Pipenv. -Check the [diagnose documentation](/docs/diagnose.rst) for common issues before posting! We may close your issue if it is very similar to one of them. Please be considerate, or be on your way. +Check the [diagnose documentation](https://docs.pipenv.org/en/latest/diagnose/) for common issues before posting! We may close your issue if it is very similar to one of them. Please be considerate, or be on your way. Make sure to mention your debugging experience if the documented solution failed. From 3402ed961581ff5704fb9badf988a481d6b75c0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gilberto=20Corr=C3=AAa=20de=20Souza?= Date: Tue, 9 Oct 2018 10:42:58 -0300 Subject: [PATCH 78/81] clarified the use of environment variables --- docs/advanced.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/advanced.rst b/docs/advanced.rst index 906544362a..328d6b79c9 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -353,12 +353,12 @@ To prevent pipenv from loading the ``.env`` file, set the ``PIPENV_DONT_LOAD_ENV ☤ Custom Script Shortcuts ------------------------- -Pipenv supports creating custom shortcuts in the (optional) ``[scripts]`` section of your Pipfile. +Pipenv supports creating custom shortcuts in the (optional) ``[scripts]`` section of your Pipfile. You can then run ``pipenv run `` in your terminal to run the command in the -context of your pipenv virtual environment even if you have not activated the pipenv shell first. +context of your pipenv virtual environment even if you have not activated the pipenv shell first. 
-For example, in your Pipfile:: +For example, in your Pipfile:: [scripts] printspam = "python -c \"print('I am a silly example, no one would need to do this')\"" @@ -380,7 +380,7 @@ For example:: ☤ Support for Environment Variables ----------------------------------- -Pipenv supports the usage of environment variables in values. For example:: +Pipenv supports the usage of environment variables in values, only in the ``[[source]]`` section. For example:: [[source]] url = "https://${PYPI_USERNAME}:${PYPI_PASSWORD}@my_private_repo.example.com/simple" @@ -394,8 +394,8 @@ Pipenv supports the usage of environment variables in values. For example:: maya = {version="*", index="pypi"} records = "*" -Environment variables may be specified as ``${MY_ENVAR}`` or ``$MY_ENVAR``. -On Windows, ``%MY_ENVAR%`` is supported in addition to ``${MY_ENVAR}`` or ``$MY_ENVAR``. +Environment variables is better specified as ``${MY_ENVAR}``. +But they may be ``$MY_ENVAR`` or ``%MY_ENVAR%`` on Windows. ☤ Configuration With Environment Variables From 4b108dfd493b68a75bcb3e2cffbc537cb7564654 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gilberto=20Corr=C3=AAa=20de=20Souza?= Date: Thu, 11 Oct 2018 14:10:26 -0300 Subject: [PATCH 79/81] change wording for Environment Variables options --- docs/advanced.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/advanced.rst b/docs/advanced.rst index 328d6b79c9..710e6da309 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -394,8 +394,8 @@ Pipenv supports the usage of environment variables in values, only in the ``[[so maya = {version="*", index="pypi"} records = "*" -Environment variables is better specified as ``${MY_ENVAR}``. -But they may be ``$MY_ENVAR`` or ``%MY_ENVAR%`` on Windows. +Environment variables is better specified as ``${MY_ENVAR}`` or ``$MY_ENVAR``. +On Windows, ``%MY_ENVAR%`` is supported in addition to ``${MY_ENVAR}`` or ``$MY_ENVAR``. ☤ Configuration With Environment Variables From 0e56fef36f25dd3b6994d7323e7a3948b48e59a5 Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 May 2019 13:00:14 -0400 Subject: [PATCH 80/81] Clean up env variable documentation Signed-off-by: Dan Ryan --- docs/advanced.rst | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/docs/advanced.rst b/docs/advanced.rst index 710e6da309..bdd0d704dc 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -358,7 +358,9 @@ Pipenv supports creating custom shortcuts in the (optional) ``[scripts]`` sectio You can then run ``pipenv run `` in your terminal to run the command in the context of your pipenv virtual environment even if you have not activated the pipenv shell first. -For example, in your Pipfile:: +For example, in your Pipfile: + +.. code-block:: toml [scripts] printspam = "python -c \"print('I am a silly example, no one would need to do this')\"" @@ -369,18 +371,25 @@ And then in your terminal:: I am a silly example, no one would need to do this Commands that expect arguments will also work. -For example:: +For example: +.. code-block:: toml [scripts] echospam = "echo I am really a very silly example" +:: + $ pipenv run echospam "indeed" I am really a very silly example indeed ☤ Support for Environment Variables ----------------------------------- -Pipenv supports the usage of environment variables in values, only in the ``[[source]]`` section. For example:: +Pipenv supports the usage of environment variables in place of authentication fragments +in your Pipfile. 
These will only be parsed if they are present in the ``[[source]]`` +section. For example: + +.. code-block:: toml [[source]] url = "https://${PYPI_USERNAME}:${PYPI_PASSWORD}@my_private_repo.example.com/simple" @@ -394,7 +403,8 @@ Pipenv supports the usage of environment variables in values, only in the ``[[so maya = {version="*", index="pypi"} records = "*" -Environment variables is better specified as ``${MY_ENVAR}`` or ``$MY_ENVAR``. +Environment variables may be specified as ``${MY_ENVAR}`` or ``$MY_ENVAR``. + On Windows, ``%MY_ENVAR%`` is supported in addition to ``${MY_ENVAR}`` or ``$MY_ENVAR``. From abee697c14a55825ed690bce03ab339e2eaf69dd Mon Sep 17 00:00:00 2001 From: Dan Ryan Date: Mon, 27 May 2019 13:23:00 -0400 Subject: [PATCH 81/81] Added news entry Signed-off-by: Dan Ryan --- news/2317.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/2317.doc.rst diff --git a/news/2317.doc.rst b/news/2317.doc.rst new file mode 100644 index 0000000000..ff56fe4df3 --- /dev/null +++ b/news/2317.doc.rst @@ -0,0 +1 @@ +Added documentation about variable expansion in ``Pipfile`` entries.
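Since the two documentation patches above describe the expansion semantics in prose, a quick stand-in shows the behaviour concretely (this uses the standard library's ``os.path.expandvars`` for illustration, not pipenv's internal substitution code; the credentials are hypothetical)::

    import os

    # Hypothetical values purely for demonstration.
    os.environ.setdefault("PYPI_USERNAME", "alice")
    os.environ.setdefault("PYPI_PASSWORD", "s3cret")

    url = "https://${PYPI_USERNAME}:${PYPI_PASSWORD}@my_private_repo.example.com/simple"
    # os.path.expandvars understands $VAR and ${VAR}, plus %VAR% on Windows,
    # matching the forms the documentation lists.
    print(os.path.expandvars(url))
    # https://alice:s3cret@my_private_repo.example.com/simple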