From 5ac676711af2a4f1e3265cd0303af9af58f88876 Mon Sep 17 00:00:00 2001
From: Dan Ryan
Date: Fri, 5 Jul 2019 02:06:43 -0400
Subject: [PATCH] Fix editable VCS dependency resolution

- Fixes #3809

Signed-off-by: Dan Ryan
---
 Pipfile | 1 -
 pipenv/cli/command.py | 6 +-
 pipenv/environment.py | 204 +++++++++++++++--
 pipenv/project.py | 10 +-
 pipenv/utils.py | 116 +++++++++-
 .../requirementslib/models/requirements.py | 28 ++-
 pipenv/vendor/requirementslib/models/utils.py | 4 +
 tests/fixtures/fake-package/.coveragerc | 27 +++
 tests/fixtures/fake-package/.editorconfig | 27 +++
 tests/fixtures/fake-package/.gitignore | 108 +++++++++
 .../fake-package/.pre-commit-config.yaml | 20 ++
 tests/fixtures/fake-package/.travis.yml | 38 ++++
 tests/fixtures/fake-package/LICENSE | 13 ++
 tests/fixtures/fake-package/MANIFEST.in | 19 ++
 tests/fixtures/fake-package/Pipfile | 14 ++
 tests/fixtures/fake-package/README.rst | 3 +
 tests/fixtures/fake-package/appveyor.yml | 61 +++++
 tests/fixtures/fake-package/docs/conf.py | 208 ++++++++++++++++++
 .../fake-package/docs/requirements.txt | 2 +
 tests/fixtures/fake-package/news/.gitignore | 1 +
 tests/fixtures/fake-package/pyproject.toml | 50 +++++
 tests/fixtures/fake-package/setup.cfg | 120 ++++++++++
 tests/fixtures/fake-package/setup.py | 35 +++
 .../fake-package/src/fake_package/__init__.py | 1 +
 .../fake-package/tasks/CHANGELOG.rst.jinja2 | 40 ++++
 tests/fixtures/fake-package/tasks/__init__.py | 175 +++++++++++++++
 tests/fixtures/fake-package/tox.ini | 37 ++++
 tests/integration/test_cli.py | 8 +-
 tests/integration/test_install_basic.py | 4 +-
 tests/integration/test_install_markers.py | 6 +-
 tests/integration/test_install_uri.py | 4 +-
 tests/integration/test_lock.py | 14 +-
 tests/pypi | 2 +-
 tests/pytest-pypi/pytest_pypi/app.py | 36 ++-
 .../pytest_pypi/templates/package_pypi.html | 4 +
 35 files changed, 1386 insertions(+), 60 deletions(-)
 create mode 100644 tests/fixtures/fake-package/.coveragerc
 create mode 100644 tests/fixtures/fake-package/.editorconfig
 create mode 100644 tests/fixtures/fake-package/.gitignore
 create mode 100644 tests/fixtures/fake-package/.pre-commit-config.yaml
 create mode 100644 tests/fixtures/fake-package/.travis.yml
 create mode 100644 tests/fixtures/fake-package/LICENSE
 create mode 100644 tests/fixtures/fake-package/MANIFEST.in
 create mode 100644 tests/fixtures/fake-package/Pipfile
 create mode 100644 tests/fixtures/fake-package/README.rst
 create mode 100644 tests/fixtures/fake-package/appveyor.yml
 create mode 100644 tests/fixtures/fake-package/docs/conf.py
 create mode 100644 tests/fixtures/fake-package/docs/requirements.txt
 create mode 100644 tests/fixtures/fake-package/news/.gitignore
 create mode 100644 tests/fixtures/fake-package/pyproject.toml
 create mode 100644 tests/fixtures/fake-package/setup.cfg
 create mode 100644 tests/fixtures/fake-package/setup.py
 create mode 100644 tests/fixtures/fake-package/src/fake_package/__init__.py
 create mode 100644 tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2
 create mode 100644 tests/fixtures/fake-package/tasks/__init__.py
 create mode 100644 tests/fixtures/fake-package/tox.ini
 create mode 100644 tests/pytest-pypi/pytest_pypi/templates/package_pypi.html

diff --git a/Pipfile b/Pipfile
index 95d10d5174..826df207d7 100644
--- a/Pipfile
+++ b/Pipfile
@@ -2,7 +2,6 @@
 pipenv = {path = ".", editable = true, extras = ["tests", "dev"]}
 sphinx-click = "*"
 click = "*"
-"path.py" = "<12.0"
 pytest_pypi = {path = "./tests/pytest-pypi", editable = true}
 stdeb = {version="*", markers="sys_platform == 'linux'"}
 jedi = "*"
diff
--git a/pipenv/cli/command.py b/pipenv/cli/command.py index a897cff00a..d1bcaa939d 100644 --- a/pipenv/cli/command.py +++ b/pipenv/cli/command.py @@ -8,9 +8,9 @@ argument, echo, edit, group, option, pass_context, secho, version_option ) -import click_completion -import crayons -import delegator +from ..vendor import click_completion +from ..vendor import delegator +from ..patched import crayons from ..__version__ import __version__ from .options import ( diff --git a/pipenv/environment.py b/pipenv/environment.py index a8489d955b..4a6940197e 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -1,7 +1,9 @@ # -*- coding=utf-8 -*- +from __future__ import absolute_import, print_function import contextlib import importlib +import io import json import operator import os @@ -18,7 +20,7 @@ import pipenv from .vendor.cached_property import cached_property -import vistir +from .vendor import vistir from .utils import normalize_path, make_posix @@ -46,6 +48,9 @@ def __init__(self, prefix=None, is_venv=False, base_working_set=None, pipfile=No self.extra_dists = [] prefix = prefix if prefix else sys.prefix self.prefix = vistir.compat.Path(prefix) + self._base_paths = {} + if self.is_venv: + self._base_paths = self.get_paths() self.sys_paths = get_paths() def safe_import(self, name): @@ -117,6 +122,13 @@ def find_libdir(self): @property def python_info(self): include_dir = self.prefix / "include" + if not os.path.exists(include_dir): + include_dirs = self.get_include_path() + if include_dirs: + include_path = include_dirs.get("include", include_dirs.get("platinclude")) + if not include_path: + return {} + include_dir = vistir.compat.Path(include_path) python_path = next(iter(list(include_dir.iterdir())), None) if python_path and python_path.name.startswith("python"): python_version = python_path.name.replace("python", "") @@ -165,17 +177,39 @@ def base_paths(self): """ prefix = make_posix(self.prefix.as_posix()) - install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' - paths = get_paths(install_scheme, vars={ - 'base': prefix, - 'platbase': prefix, - }) - current_version = get_python_version() - for k in list(paths.keys()): - if not os.path.exists(paths[k]): - paths[k] = self._replace_parent_version(paths[k], current_version) + paths = {} + if self._base_paths: + paths = self._base_paths.copy() + else: + try: + paths = self.get_paths() + except Exception: + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) + current_version = get_python_version() + try: + for k in list(paths.keys()): + if not os.path.exists(paths[k]): + paths[k] = self._replace_parent_version(paths[k], current_version) + except OSError: + # Sometimes virtualenvs are made using virtualenv interpreters and there is no + # include directory, which will cause this approach to fail. 
This failsafe + # will make sure we fall back to the shell execution to find the real include path + paths = self.get_include_path() + paths.update(self.get_lib_paths()) + paths["scripts"] = self.script_basedir + if not paths: + install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + paths = get_paths(install_scheme, vars={ + 'base': prefix, + 'platbase': prefix, + }) if not os.path.exists(paths["purelib"]) and not os.path.exists(paths["platlib"]): - paths = self.get_paths() + lib_paths = self.get_lib_paths() + paths.update(lib_paths) paths["PATH"] = paths["scripts"] + os.pathsep + os.defpath if "prefix" not in paths: paths["prefix"] = prefix @@ -232,6 +266,47 @@ def sys_path(self): path = sys.path return path + def build_command(self, python_lib=False, python_inc=False, scripts=False, py_version=False): + """Build the text for running a command in the given environment + + :param python_lib: Whether to include the python lib dir commands, defaults to False + :type python_lib: bool, optional + :param python_inc: Whether to include the python include dir commands, defaults to False + :type python_inc: bool, optional + :param scripts: Whether to include the scripts directory, defaults to False + :type scripts: bool, optional + :param py_version: Whether to include the python version info, defaults to False + :type py_version: bool, optional + :return: A string representing the command to run + """ + pylib_lines = [] + pyinc_lines = [] + py_command = ( + "import sysconfig, distutils.sysconfig, io, json, sys; paths = {{" + "%s }}; value = u'{{0}}'.format(json.dumps(paths));" + "fh = io.open('{0}', 'w'); fh.write(value); fh.close()" + ) + distutils_line = "distutils.sysconfig.get_python_{0}(plat_specific={1})" + sysconfig_line = "sysconfig.get_path('{0}')" + if python_lib: + for key, var, val in (("pure", "lib", "0"), ("plat", "lib", "1")): + dist_prefix = "{0}lib".format(key) + # XXX: We need to get 'stdlib' or 'platstdlib' + sys_prefix = "{0}stdlib".format("" if key == "pure" else key) + pylib_lines.append("u'%s': u'{{0}}'.format(%s)" % (dist_prefix, distutils_line.format(var, val))) + pylib_lines.append("u'%s': u'{{0}}'.format(%s)" % (sys_prefix, sysconfig_line.format(sys_prefix))) + if python_inc: + for key, var, val in (("include", "inc", "0"), ("platinclude", "inc", "1")): + pylib_lines.append("u'%s': u'{{0}}'.format(%s)" % (key, distutils_line.format(var, val))) + lines = pylib_lines + pyinc_lines + if scripts: + lines.append("u'scripts': u'{{0}}'.format(%s)" % sysconfig_line.format("scripts")) + if py_version: + lines.append("u'py_version_short': u'{{0}}'.format(distutils.sysconfig.get_python_version()),") + lines_as_str = u",".join(lines) + py_command = py_command % lines_as_str + return py_command + def get_paths(self): """ Get the paths for the environment by running a subcommand @@ -239,21 +314,108 @@ def get_paths(self): :return: The python paths for the environment :rtype: Dict[str, str] """ - prefix = make_posix(self.prefix.as_posix()) - install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' + tmpfile = vistir.path.create_tracked_tempfile(suffix=".json") + tmpfile.close() + tmpfile_path = make_posix(tmpfile.name) + py_command = self.build_command(python_lib=True, python_inc=True, scripts=True, py_version=True) + command = [self.python, "-c", py_command.format(tmpfile_path)] + c = vistir.misc.run( + command, return_object=True, block=True, nospin=True, write_to_stdout=False + ) + if c.returncode == 0: + paths = {} + with io.open(tmpfile_path, "r", 
encoding="utf-8") as fh: + paths = json.load(fh) + if "purelib" in paths: + paths["libdir"] = paths["purelib"] = make_posix(paths["purelib"]) + for key in ("platlib", "scripts", "platstdlib", "stdlib", "include", "platinclude"): + if key in paths: + paths[key] = make_posix(paths[key]) + return paths + else: + vistir.misc.echo("Failed to load paths: {0}".format(c.err), fg="yellow") + vistir.misc.echo("Output: {0}".format(c.out), fg="yellow") + return None + + def get_lib_paths(self): + """Get the include path for the environment + + :return: The python include path for the environment + :rtype: Dict[str, str] + """ + tmpfile = vistir.path.create_tracked_tempfile(suffix=".json") + tmpfile.close() + tmpfile_path = make_posix(tmpfile.name) + py_command = self.build_command(python_lib=True) + command = [self.python, "-c", py_command.format(tmpfile_path)] + c = vistir.misc.run( + command, return_object=True, block=True, nospin=True, write_to_stdout=False + ) + paths = None + if c.returncode == 0: + paths = {} + with io.open(tmpfile_path, "r", encoding="utf-8") as fh: + paths = json.load(fh) + if "purelib" in paths: + paths["libdir"] = paths["purelib"] = make_posix(paths["purelib"]) + for key in ("platlib", "platstdlib", "stdlib"): + if key in paths: + paths[key] = make_posix(paths[key]) + return paths + else: + vistir.misc.echo("Failed to load paths: {0}".format(c.err), fg="yellow") + vistir.misc.echo("Output: {0}".format(c.out), fg="yellow") + if not paths: + if not self.prefix.joinpath("lib").exists(): + return {} + stdlib_path = next(iter([ + p for p in self.prefix.joinpath("lib").iterdir() + if p.name.startswith("python") + ]), None) + lib_path = None + if stdlib_path: + lib_path = next(iter([ + p.as_posix() for p in stdlib_path.iterdir() + if p.name == "site-packages" + ])) + paths = {"stdlib": stdlib_path.as_posix()} + if lib_path: + paths["purelib"] = lib_path + return paths + return {} + + def get_include_path(self): + """Get the include path for the environment + + :return: The python include path for the environment + :rtype: Dict[str, str] + """ + tmpfile = vistir.path.create_tracked_tempfile(suffix=".json") + tmpfile.close() + tmpfile_path = make_posix(tmpfile.name) py_command = ( - "import sysconfig, json, distutils.sysconfig;" - "paths = sysconfig.get_paths('{0}', vars={{'base': '{1}', 'platbase': '{1}'}}" - ");paths['purelib'] = distutils.sysconfig.get_python_lib(plat_specific=0, " - "prefix='{1}');paths['platlib'] = distutils.sysconfig.get_python_lib(" - "plat_specific=1, prefix='{1}');print(json.dumps(paths))" + "import distutils.sysconfig, io, json, sys; paths = {{u'include': " + "u'{{0}}'.format(distutils.sysconfig.get_python_inc(plat_specific=0)), " + "u'platinclude': u'{{0}}'.format(distutils.sysconfig.get_python_inc(" + "plat_specific=1)) }}; value = u'{{0}}'.format(json.dumps(paths));" + "fh = io.open('{0}', 'w'); fh.write(value); fh.close()" ) - command = [self.python, "-c", py_command.format(install_scheme, prefix)] + command = [self.python, "-c", py_command.format(tmpfile_path)] c = vistir.misc.run( command, return_object=True, block=True, nospin=True, write_to_stdout=False ) - paths = json.loads(vistir.misc.to_text(c.out.strip())) - return paths + if c.returncode == 0: + paths = [] + with io.open(tmpfile_path, "r", encoding="utf-8") as fh: + paths = json.load(fh) + for key in ("include", "platinclude"): + if key in paths: + paths[key] = make_posix(paths[key]) + return paths + else: + vistir.misc.echo("Failed to load paths: {0}".format(c.err), fg="yellow") + 
vistir.misc.echo("Output: {0}".format(c.out), fg="yellow") + return None @cached_property def sys_prefix(self): diff --git a/pipenv/project.py b/pipenv/project.py index 1545aebd30..d0d668bfc8 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -27,7 +27,7 @@ from .environments import ( PIPENV_DEFAULT_PYTHON_VERSION, PIPENV_IGNORE_VIRTUALENVS, PIPENV_MAX_DEPTH, PIPENV_PIPFILE, PIPENV_PYTHON, PIPENV_TEST_INDEX, PIPENV_VENV_IN_PROJECT, - is_in_virtualenv + is_in_virtualenv, is_type_checking ) from .vendor.requirementslib.models.utils import get_default_pyproject_backend from .utils import ( @@ -38,6 +38,10 @@ safe_expandvars, get_pipenv_dist ) +if is_type_checking(): + from typing import Dict, Text, Union + TSource = Dict[Text, Union[Text, bool]] + def _normalized(p): if p is None: @@ -851,6 +855,10 @@ def sources(self): else: return self.pipfile_sources + @property + def index_urls(self): + return [src.get("url") for src in self.sources] + def find_source(self, source): """ Given a source, find it. diff --git a/pipenv/utils.py b/pipenv/utils.py index 64333e2697..2b0a8cdbf9 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -20,12 +20,8 @@ import tomlkit from click import echo as click_echo -six.add_move(six.MovedAttribute("Mapping", "collections", "collections.abc")) # noqa -six.add_move(six.MovedAttribute("Sequence", "collections", "collections.abc")) # noqa -six.add_move(six.MovedAttribute("Set", "collections", "collections.abc")) # noqa -from six.moves import Mapping, Sequence, Set from six.moves.urllib.parse import urlparse -from .vendor.vistir.compat import ResourceWarning, lru_cache +from .vendor.vistir.compat import ResourceWarning, lru_cache, Mapping, Sequence, Set from .vendor.vistir.misc import fs_str, run import crayons @@ -42,9 +38,10 @@ if environments.MYPY_RUNNING: from typing import Tuple, Dict, Any, List, Union, Optional, Text from .vendor.requirementslib.models.requirements import Requirement, Line + from .vendor.requirementslib.models.pipfile import Pipfile from .vendor.packaging.markers import Marker from .vendor.packaging.specifiers import Specifier - from .project import Project + from .project import Project, TSource logging.basicConfig(level=logging.ERROR) @@ -285,6 +282,88 @@ def prepare_pip_source_args(sources, pip_args=None): return pip_args +def get_project_index(index=None, trusted_hosts=None, project=None): + # type: (Optional[Union[str, TSource]], Optional[List[str]], Optional[Project]) -> TSource + from .project import SourceNotFound + if not project: + from .core import project + if trusted_hosts is None: + trusted_hosts = [] + if isinstance(index, Mapping): + return project.find_source(index.get("url")) + try: + source = project.find_source(index) + except SourceNotFound: + index_url = urllib3_util.parse_url(index) + src_name = project.src_name_from_url(index) + verify_ssl = index_url.host not in trusted_hosts + source = {"url": index, "verify_ssl": verify_ssl, "name": src_name} + return source + + +def get_source_list( + index=None, # type: Optional[Union[str, TSource]] + extra_indexes=None, # type: Optional[List[str]] + trusted_hosts=None, # type: Optional[List[str]] + pypi_mirror=None, # type: Optional[str] + project=None, # type: Optional[Project] +): + # type: (...) 
-> List[TSource] + sources = [] # type: List[TSource] + if not project: + from .core import project + if index: + sources.append(get_project_index(index)) + if extra_indexes: + if isinstance(extra_indexes, six.string_types): + extra_indexes = [extra_indexes,] + for source in extra_indexes: + extra_src = get_project_index(source) + if not sources or extra_src["url"] != sources[0]["url"]: + sources.append(extra_src) + else: + for source in project.pipfile_sources: + if not sources or source["url"] != sources[0]["url"]: + sources.append(source) + if not sources: + sources = project.pipfile_sources[:] + if pypi_mirror: + sources = [ + create_mirror_source(pypi_mirror) if is_pypi_url(source["url"]) else source + for source in sources + ] + return sources + + +def get_indexes_from_requirement(req, project=None, index=None, extra_indexes=None, trusted_hosts=None, pypi_mirror=None): + # type: (Requirement, Optional[Project], Optional[Text], Optional[List[Text]], Optional[List[Text]], Optional[Text]) -> Tuple[TSource, List[TSource], List[Text]] + if not project: + from .core import project + index_sources = [] # type: List[TSource] + if not trusted_hosts: + trusted_hosts = [] # type: List[Text] + if extra_indexes is None: + extra_indexes = [] + project_indexes = project.pipfile_sources[:] + indexes = [] + if req.index: + indexes.append(req.index) + if getattr(req, "extra_indexes", None): + if not isinstance(req.extra_indexes, list): + indexes.append(req.extra_indexes) + else: + indexes.extend(req.extra_indexes) + indexes.extend(project_indexes) + if len(indexes) > 1: + index, extra_indexes = indexes[0], indexes[1:] + index_sources = get_source_list(index=index, extra_indexes=extra_indexes, trusted_hosts=trusted_hosts, pypi_mirror=pypi_mirror, project=project) + if len(index_sources) > 1: + index_source, extra_index_sources = index_sources[0], index_sources[1:] + else: + index_source, extra_index_sources = index_sources[0], [] + return index_source, extra_index_sources + + @lru_cache() def get_pipenv_sitedir(): # type: () -> Optional[str] @@ -573,6 +652,29 @@ def create( markers_lookup=markers_lookup, skipped=skipped, clear=clear, pre=pre ) + @classmethod + def from_pipfile(cls, project=None, pipfile=None, dev=False, pre=False, clear=False): + # type: (Optional[Project], Optional[Pipfile], bool, bool, bool) -> "Resolver" + from pipenv.vendor.vistir.path import create_tracked_tempdir + if not project: + from pipenv.core import project + if not pipfile: + pipfile = project._pipfile + req_dir = create_tracked_tempdir(suffix="-requirements", prefix="pipenv-") + index_lookup, markers_lookup = {}, {} + deps = set() + if dev: + deps.update(set([req.as_line() for req in pipfile.dev_packages])) + deps.update(set([req.as_line() for req in pipfile.packages])) + constraints, skipped, index_lookup, markers_lookup = cls.get_metadata( + list(deps), index_lookup, markers_lookup, project, project.sources, + req_dir=req_dir, pre=pre, clear=clear + ) + return Resolver( + constraints, req_dir, project, project.sources, index_lookup=index_lookup, + markers_lookup=markers_lookup, skipped=skipped, clear=clear, pre=pre + ) + @property def pip_command(self): if self._pip_command is None: @@ -945,6 +1047,8 @@ def format_requirement_for_lockfile(req, markers_lookup, index_lookup, hashes=No if markers: entry.update({"markers": markers}) entry = translate_markers(entry) + if req.vcs or req.editable and entry.get("index"): + del entry["index"] return name, entry diff --git a/pipenv/vendor/requirementslib/models/requirements.py 
b/pipenv/vendor/requirementslib/models/requirements.py
index 40a49d48c0..dc76661999 100644
--- a/pipenv/vendor/requirementslib/models/requirements.py
+++ b/pipenv/vendor/requirementslib/models/requirements.py
@@ -1156,7 +1156,8 @@ def parse_link(self):
     def parse_markers(self):
         # type: () -> None
         if self.markers:
-            markers = PackagingRequirement("fakepkg; {0}".format(self.markers)).marker
+            marker_str = self.markers.replace('"', "'")
+            markers = PackagingRequirement("fakepkg; {0}".format(marker_str)).marker
             self.parsed_marker = markers
 
     @property
@@ -1229,7 +1230,12 @@ def line_is_installable(self):
 
     def parse(self):
         # type: () -> None
+        self.line = self.line.strip()
+        if self.line.startswith('"'):
+            self.line = self.line.strip('"')
         self.line, self.markers = split_markers_from_line(self.parse_hashes().line)
+        if self.markers:
+            self.markers = self.markers.replace('"', "'")
         self.parse_extras()
         self.line = self.line.strip('"').strip("'").strip()
         if self.line.startswith("git+file:/") and not self.line.startswith(
@@ -2614,16 +2620,28 @@ def get_line_instance(self):
         # type: () -> Line
         line_parts = []
         if self.req:
-            line_parts.append(self.req.line_part)
+            if self.req.line_part.startswith("-e "):
+                line_parts.extend(self.req.line_part.split(" ", 1))
+            else:
+                line_parts.append(self.req.line_part)
         if not self.is_vcs and not self.vcs and self.extras_as_pip:
             line_parts.append(self.extras_as_pip)
         if self._specifiers and not (self.is_file_or_url or self.is_vcs):
             line_parts.append(self._specifiers)
         if self.markers:
-            line_parts.append("; {0}".format(self.markers))
-        if self.hashes_as_pip:
+            line_parts.append("; {0}".format(self.markers.replace('"', "'")))
+        if self.hashes_as_pip and not (self.editable or self.vcs or self.is_vcs):
             line_parts.append(self.hashes_as_pip)
-        line = "".join(line_parts)
+        if self.editable:
+            if line_parts[0] == "-e":
+                line = "".join(line_parts[1:])
+            else:
+                line = "".join(line_parts)
+            if self.markers:
+                line = '"{0}"'.format(line)
+            line = "-e {0}".format(line)
+        else:
+            line = "".join(line_parts)
         return Line(line)
 
     @property
diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py
index 1c1c320d93..4b4979549d 100644
--- a/pipenv/vendor/requirementslib/models/utils.py
+++ b/pipenv/vendor/requirementslib/models/utils.py
@@ -504,6 +504,10 @@ def get_pyproject(path):
 def split_markers_from_line(line):
     # type: (AnyStr) -> Tuple[AnyStr, Optional[AnyStr]]
     """Split markers from a dependency"""
+    quote_chars = ["'", '"']
+    line_quote = next(iter(quote for quote in quote_chars if line.startswith(quote)), None)
+    if line_quote and line.endswith(line_quote):
+        line = line.strip(line_quote)
     if not any(line.startswith(uri_prefix) for uri_prefix in SCHEME_LIST):
         marker_sep = ";"
     else:
diff --git a/tests/fixtures/fake-package/.coveragerc b/tests/fixtures/fake-package/.coveragerc
new file mode 100644
index 0000000000..1b3a057198
--- /dev/null
+++ b/tests/fixtures/fake-package/.coveragerc
@@ -0,0 +1,27 @@
+[run]
+branch = True
+parallel = True
+source = src/fake_package/
+
+[report]
+# Regexes for lines to exclude from consideration
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
+
+    # Don't complain about missing debug-only code:
+    def __repr__
+    if self\.debug
+
+    # Don't complain if tests don't hit defensive assertion code:
+    raise AssertionError
+    raise NotImplementedError
+    # Don't complain if non-runnable code isn't run:
+    if 0:
+    if __name__ == .__main__.:
+
+[html]
+directory = htmlcov
+
+[xml]
+output =
coverage.xml diff --git a/tests/fixtures/fake-package/.editorconfig b/tests/fixtures/fake-package/.editorconfig new file mode 100644 index 0000000000..7470e9dbe1 --- /dev/null +++ b/tests/fixtures/fake-package/.editorconfig @@ -0,0 +1,27 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false + +[*.toml] +indent_size = 2 + +[*.{yaml,yml}] +indent_size = 2 + +# Makefiles always use tabs for indentation. +[Makefile] +indent_style = tab + +# Batch files use tabs for indentation, and old Notepad hates LF. +[*.bat] +indent_style = tab +end_of_line = crlf diff --git a/tests/fixtures/fake-package/.gitignore b/tests/fixtures/fake-package/.gitignore new file mode 100644 index 0000000000..ab621d861a --- /dev/null +++ b/tests/fixtures/fake-package/.gitignore @@ -0,0 +1,108 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.typeshed + +.vscode/ + +pip-wheel-metadata diff --git a/tests/fixtures/fake-package/.pre-commit-config.yaml b/tests/fixtures/fake-package/.pre-commit-config.yaml new file mode 100644 index 0000000000..7ecca7dc4e --- /dev/null +++ b/tests/fixtures/fake-package/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +repos: + - repo: https://github.com/ambv/black + rev: stable + hooks: + - id: black + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.0.0 + hooks: + - id: flake8 + + - repo: https://github.com/asottile/seed-isort-config + rev: v1.7.0 + hooks: + - id: seed-isort-config + + - repo: https://github.com/pre-commit/mirrors-isort + rev: v4.3.9 + hooks: + - id: isort diff --git a/tests/fixtures/fake-package/.travis.yml b/tests/fixtures/fake-package/.travis.yml new file mode 100644 index 0000000000..84bf6c0397 --- /dev/null +++ b/tests/fixtures/fake-package/.travis.yml @@ -0,0 +1,38 @@ +language: python +sudo: false +cache: pip +dist: trusty + +matrix: + fast_finish: true + +install: + - "python -m pip install --upgrade pip pytest-timeout" + - "python -m pip install -e .[tests]" +script: + - "python -m pytest -v -n 8 tests/" + +jobs: + include: + - stage: test + - python: "3.7" + dist: xenial + sudo: required + - python: "3.6" + - python: "2.7" + - python: "3.5" + - python: "3.4" + - stage: packaging + 
python: "3.6" + install: + - "pip install --upgrade twine readme-renderer[md]" + script: + - "python setup.py sdist" + - "twine check dist/*" + - stage: coverage + python: "3.6" + install: + - "python -m pip install --upgrade pip pytest-timeout pytest-cov" + - "python -m pip install --upgrade -e .[tests]" + script: + - "python -m pytest -n auto --timeout 300 --cov=fake_package --cov-report=term-missing --cov-report=xml --cov-report=html tests" diff --git a/tests/fixtures/fake-package/LICENSE b/tests/fixtures/fake-package/LICENSE new file mode 100644 index 0000000000..0beb71e0a4 --- /dev/null +++ b/tests/fixtures/fake-package/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2019, Dan Ryan + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/tests/fixtures/fake-package/MANIFEST.in b/tests/fixtures/fake-package/MANIFEST.in new file mode 100644 index 0000000000..7ab57a2180 --- /dev/null +++ b/tests/fixtures/fake-package/MANIFEST.in @@ -0,0 +1,19 @@ +include LICENSE* README* +include CHANGELOG.rst +include pyproject.toml + +exclude .editorconfig +exclude .coveragerc +exclude .travis.yml +exclude tox.ini +exclude appveyor.yml +exclude Pipfile* + +recursive-include docs Makefile *.rst *.py *.bat +recursive-exclude docs requirements*.txt + +prune .github +prune docs/build +prune news +prune tasks +prune tests diff --git a/tests/fixtures/fake-package/Pipfile b/tests/fixtures/fake-package/Pipfile new file mode 100644 index 0000000000..284b77988c --- /dev/null +++ b/tests/fixtures/fake-package/Pipfile @@ -0,0 +1,14 @@ +[packages] +fake_package = { path = '.', editable = true, extras = ["dev", "tests"] } + +[dev-packages] +towncrier = '*' +sphinx = '*' +sphinx-rtd-theme = '*' + +[scripts] +release = 'inv release' +tests = "pytest -v tests" +draft = "towncrier --draft" +changelog = "towncrier" +build = "setup.py sdist bdist_wheel" diff --git a/tests/fixtures/fake-package/README.rst b/tests/fixtures/fake-package/README.rst new file mode 100644 index 0000000000..4256cd1f8e --- /dev/null +++ b/tests/fixtures/fake-package/README.rst @@ -0,0 +1,3 @@ +=============================================================================== +fake_package: A fake python package. 
+=============================================================================== diff --git a/tests/fixtures/fake-package/appveyor.yml b/tests/fixtures/fake-package/appveyor.yml new file mode 100644 index 0000000000..758f4cfcfd --- /dev/null +++ b/tests/fixtures/fake-package/appveyor.yml @@ -0,0 +1,61 @@ +build: off +version: 1.0.{build} +skip_branch_with_pr: true + +init: +- ps: >- + + git config --global core.sharedRepository true + + git config --global core.longpaths true + + git config --global core.autocrlf input + + if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod ` + https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` + Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { ` + Write-Host "There are newer queued builds for this pull request, skipping build." + Exit-AppveyorBuild + } + + If (($env:SKIP_NOTAG -eq "true") -and ($env:APPVEYOR_REPO_TAG -ne "true")) { + Write-Host "Skipping build, not at a tag." + Exit-AppveyorBuild + } + +environment: + GIT_ASK_YESNO: 'false' + APPVEYOR_SAVE_CACHE_ON_ERROR: 'true' + APPVEYOR_SKIP_FINALIZE_ON_EXIT: 'true' + SHELL: 'windows' + PYTHON_ARCH: '64' + PYTHONIOENCODING: 'utf-8' + + matrix: + # Unit and integration tests. + - PYTHON: "C:\\Python27" + RUN_INTEGRATION_TESTS: "True" + - PYTHON: "C:\\Python37-x64" + RUN_INTEGRATION_TESTS: "True" + # Unit tests only. + - PYTHON: "C:\\Python36-x64" + - PYTHON: "C:\\Python34-x64" + - PYTHON: "C:\\Python35-x64" + +cache: +- '%LocalAppData%\pip\cache' +- '%LocalAppData%\pipenv\cache' + +install: + - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" + - "python --version" + - "python -m pip install --upgrade pip pytest-timeout" + - "python -m pip install -e .[tests]" + + +test_script: + # Shorten paths, workaround https://bugs.python.org/issue18199 + - "subst T: %TEMP%" + - "set TEMP=T:\\" + - "set TMP=T:\\" + - "python -m pytest -n auto -v tests" diff --git a/tests/fixtures/fake-package/docs/conf.py b/tests/fixtures/fake-package/docs/conf.py new file mode 100644 index 0000000000..3aded982fe --- /dev/null +++ b/tests/fixtures/fake-package/docs/conf.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +PACKAGE_DIR = os.path.join(ROOT, "src/fake_package") +sys.path.insert(0, PACKAGE_DIR) + + +# -- Project information ----------------------------------------------------- + +project = 'fake_package' +copyright = '2019, Dan Ryan ' +author = 'Dan Ryan ' + +# The short X.Y version +version = '0.0' +# The full version, including alpha/beta/rc tags +release = '0.0.0.dev0' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.viewcode', + 'sphinx.ext.todo', + 'sphinx.ext.intersphinx', + 'sphinx.ext.autosummary' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = ['_build', '_man', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' +autosummary_generate = True + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'fake_packagedoc' +extlinks = { + 'issue': ('https://github.com/sarugaku/fake_package/issues/%s', '#'), + 'pull': ('https://github.com/sarugaku/fake_package/pull/%s', 'PR #'), +} +html_theme_options = { + 'display_version': True, + 'prev_next_buttons_location': 'bottom', + 'style_external_links': True, + 'vcs_pageview_mode': '', + # Toc options + 'collapse_navigation': True, + 'sticky_navigation': True, + 'navigation_depth': 4, + 'includehidden': True, + 'titles_only': False +} + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'fake_package.tex', 'fake_package Documentation', + 'Dan Ryan \\textless{}dan@danryan.co\\textgreater{}', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'fake_package', 'fake_package Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'fake_package', 'fake_package Documentation', + author, 'fake_package', 'A fake python package.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project +epub_author = author +epub_publisher = author +epub_copyright = copyright + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- + +# -- Options for todo extension ---------------------------------------------- + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True +intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} diff --git a/tests/fixtures/fake-package/docs/requirements.txt b/tests/fixtures/fake-package/docs/requirements.txt new file mode 100644 index 0000000000..82133027c9 --- /dev/null +++ b/tests/fixtures/fake-package/docs/requirements.txt @@ -0,0 +1,2 @@ +sphinx +sphinx_rtd_theme diff --git a/tests/fixtures/fake-package/news/.gitignore b/tests/fixtures/fake-package/news/.gitignore new file mode 100644 index 0000000000..f935021a8f --- /dev/null +++ b/tests/fixtures/fake-package/news/.gitignore @@ -0,0 +1 @@ +!.gitignore diff --git a/tests/fixtures/fake-package/pyproject.toml b/tests/fixtures/fake-package/pyproject.toml new file mode 100644 index 0000000000..e157956b39 --- /dev/null +++ b/tests/fixtures/fake-package/pyproject.toml @@ -0,0 +1,50 @@ +[build-system] +requires = ['setuptools>=40.8.0', 'wheel>=0.33.0'] + +[tool.black] +line-length = 90 +include = '\.pyi?$' +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.pyre_configuration + | \.venv + | _build + | buck-out + | build + | dist +) +''' + +[tool.towncrier] +package = 'fake-package' +package_dir = 'src' +filename = 'CHANGELOG.rst' +directory = 'news/' +title_format = '{version} ({project_date})' +issue_format = '`#{issue} `_' +template = 'tasks/CHANGELOG.rst.jinja2' + + [[tool.towncrier.type]] + directory = 'feature' + name = 'Features' + showcontent = true + + [[tool.towncrier.type]] + directory = 'bugfix' + name = 'Bug Fixes' + showcontent = true + + [[tool.towncrier.type]] + directory = 'trivial' + name = 'Trivial Changes' + showcontent = false + + [[tool.towncrier.type]] + directory = 'removal' + name = 'Removals and Deprecations' + showcontent = true diff --git a/tests/fixtures/fake-package/setup.cfg b/tests/fixtures/fake-package/setup.cfg new file mode 100644 index 0000000000..c357cea97a --- /dev/null +++ 
b/tests/fixtures/fake-package/setup.cfg @@ -0,0 +1,120 @@ +[metadata] +name = fake_package +description = A fake python package. +url = https://github.com/sarugaku/fake_package +author = Dan Ryan +author_email = dan@danryan.co +long_description = file: README.rst +license = ISC License +keywords = fake package test +classifier = + Development Status :: 1 - Planning + License :: OSI Approved :: ISC License (ISCL) + Operating System :: OS Independent + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.6 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.4 + Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Topic :: Software Development :: Libraries :: Python Modules + +[options.extras_require] +tests = + pytest + pytest-xdist + pytest-cov + pytest-timeout + readme-renderer[md] + twine +dev = + black;python_version>="3.6" + flake8 + flake8-bugbear;python_version>="3.5" + invoke + isort + mypy;python_version>="3.5" + parver + pre-commit + rope + wheel + +[options] +zip_safe = true +python_requires = >=2.6,!=3.0,!=3.1,!=3.2,!=3.3 +setup_requires = setuptools>=40.8.0 +install_requires = + invoke + attrs + +[bdist_wheel] +universal = 1 + +[tool:pytest] +strict = true +plugins = cov flake8 +addopts = -ra +testpaths = tests/ +norecursedirs = .* build dist news tasks docs +flake8-ignore = + docs/source/* ALL + tests/*.py ALL + setup.py ALL +filterwarnings = + ignore::DeprecationWarning + ignore::PendingDeprecationWarning + +[isort] +atomic = true +not_skip = __init__.py +line_length = 90 +indent = ' ' +multi_line_output = 3 +known_third_party = invoke,parver,pytest,setuptools,towncrier +known_first_party = + fake_package + tests +combine_as_imports=True +include_trailing_comma = True +force_grid_wrap=0 + +[flake8] +max-line-length = 90 +select = C,E,F,W,B,B950 +ignore = + # The default ignore list: + D203,F401,E123,E203,W503,E501,E402 + #E121,E123,E126,E226,E24,E704, + # Our additions: + # E123: closing bracket does not match indentation of opening bracket’s line + # E203: whitespace before ‘:’ + # E129: visually indented line with same indent as next logical line + # E222: multiple spaces after operator + # E231: missing whitespace after ',' + # D203: 1 blank line required before class docstring + # E402: module level import not at top of file + # E501: line too long (using B950 from flake8-bugbear) + # F401: Module imported but unused + # W503: line break before binary operator (not a pep8 issue, should be ignored) +exclude = + .tox, + .git, + __pycache__, + docs/source/*, + build, + dist, + tests/*, + *.pyc, + *.egg-info, + .cache, + .eggs, + setup.py, +max-complexity=13 + +[mypy] +ignore_missing_imports=true +follow_imports=skip +html_report=mypyhtml +python_version=2.7 diff --git a/tests/fixtures/fake-package/setup.py b/tests/fixtures/fake-package/setup.py new file mode 100644 index 0000000000..1d2c88f859 --- /dev/null +++ b/tests/fixtures/fake-package/setup.py @@ -0,0 +1,35 @@ +import ast +import os + +from setuptools import find_packages, setup + + +ROOT = os.path.dirname(__file__) + +PACKAGE_NAME = 'fake_package' + +VERSION = None + +with open(os.path.join(ROOT, 'src', PACKAGE_NAME.replace("-", "_"), '__init__.py')) as f: + for line in f: + if line.startswith('__version__ = '): + VERSION = ast.literal_eval(line[len('__version__ = '):].strip()) + break +if VERSION is None: + raise EnvironmentError('failed to read version') + + +# Put 
everything in setup.cfg, except those that don't actually work? +setup( + # These really don't work. + package_dir={'': 'src'}, + packages=find_packages('src'), + + # I don't know how to specify an empty key in setup.cfg. + package_data={ + '': ['LICENSE*', 'README*'], + }, + + # I need this to be dynamic. + version=VERSION, +) diff --git a/tests/fixtures/fake-package/src/fake_package/__init__.py b/tests/fixtures/fake-package/src/fake_package/__init__.py new file mode 100644 index 0000000000..b8023d8bc0 --- /dev/null +++ b/tests/fixtures/fake-package/src/fake_package/__init__.py @@ -0,0 +1 @@ +__version__ = '0.0.1' diff --git a/tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 b/tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 new file mode 100644 index 0000000000..8aff205734 --- /dev/null +++ b/tests/fixtures/fake-package/tasks/CHANGELOG.rst.jinja2 @@ -0,0 +1,40 @@ +{% for section in sections %} +{% set underline = "-" %} +{% if section %} +{{section}} +{{ underline * section|length }}{% set underline = "~" %} + +{% endif %} +{% if sections[section] %} +{% for category, val in definitions.items() if category in sections[section] and category != 'trivial' %} + +{{ definitions[category]['name'] }} +{{ underline * definitions[category]['name']|length }} + +{% if definitions[category]['showcontent'] %} +{% for text, values in sections[section][category]|dictsort(by='value') %} +- {{ text }}{% if category != 'process' %} + {{ values|sort|join(',\n ') }} + {% endif %} + +{% endfor %} +{% else %} +- {{ sections[section][category]['']|sort|join(', ') }} + + +{% endif %} +{% if sections[section][category]|length == 0 %} + +No significant changes. + + +{% else %} +{% endif %} +{% endfor %} +{% else %} + +No significant changes. + + +{% endif %} +{% endfor %} diff --git a/tests/fixtures/fake-package/tasks/__init__.py b/tests/fixtures/fake-package/tasks/__init__.py new file mode 100644 index 0000000000..a8cedab4ec --- /dev/null +++ b/tests/fixtures/fake-package/tasks/__init__.py @@ -0,0 +1,175 @@ +import pathlib +import shutil +import subprocess + +import invoke +import parver + +from towncrier._builder import ( + find_fragments, render_fragments, split_fragments, +) +from towncrier._settings import load_config + + +ROOT = pathlib.Path(__file__).resolve().parent.parent + +PACKAGE_NAME = 'fake_package' + +INIT_PY = ROOT.joinpath('src', PACKAGE_NAME, '__init__.py') + + +@invoke.task() +def typecheck(ctx): + src_dir = ROOT / "src" / PACKAGE_NAME + src_dir = src_dir.as_posix() + config_file = ROOT / "setup.cfg" + env = {"MYPYPATH": src_dir} + ctx.run(f"mypy {src_dir} --config-file={config_file}", env=env) + + +@invoke.task() +def clean(ctx): + """Clean previously built package artifacts. 
+ """ + ctx.run(f'python setup.py clean') + dist = ROOT.joinpath('dist') + print(f'[clean] Removing {dist}') + if dist.exists(): + shutil.rmtree(str(dist)) + + +def _read_version(): + out = subprocess.check_output(['git', 'tag'], encoding='ascii') + try: + version = max(parver.Version.parse(v).normalize() for v in ( + line.strip() for line in out.split('\n') + ) if v) + except ValueError: + version = parver.Version.parse('0.0.0') + return version + + +def _write_version(v): + lines = [] + with INIT_PY.open() as f: + for line in f: + if line.startswith("__version__ = "): + line = f"__version__ = {repr(str(v))}\n".replace("'", '"') + lines.append(line) + with INIT_PY.open("w", newline="\n") as f: + f.write("".join(lines)) + + +def _render_log(): + """Totally tap into Towncrier internals to get an in-memory result. + """ + config = load_config(ROOT) + definitions = config["types"] + fragments, fragment_filenames = find_fragments( + pathlib.Path(config["directory"]).absolute(), + config["sections"], + None, + definitions, + ) + rendered = render_fragments( + pathlib.Path(config["template"]).read_text(encoding="utf-8"), + config["issue_format"], + split_fragments(fragments, definitions), + definitions, + config["underlines"][1:], + False, # Don't add newlines to wrapped text. + ) + return rendered + + +REL_TYPES = ("major", "minor", "patch", "post") + + +def _bump_release(version, type_): + if type_ not in REL_TYPES: + raise ValueError(f"{type_} not in {REL_TYPES}") + index = REL_TYPES.index(type_) + next_version = version.base_version().bump_release(index=index) + print(f"[bump] {version} -> {next_version}") + return next_version + + +def _prebump(version, prebump): + next_version = version.bump_release(index=prebump).bump_dev() + print(f"[bump] {version} -> {next_version}") + return next_version + + +PREBUMP = 'patch' + + +@invoke.task(pre=[clean]) +def release(ctx, type_, repo, prebump=PREBUMP): + """Make a new release. + """ + if prebump not in REL_TYPES: + raise ValueError(f'{type_} not in {REL_TYPES}') + prebump = REL_TYPES.index(prebump) + + version = _read_version() + version = _bump_release(version, type_) + _write_version(version) + + # Needs to happen before Towncrier deletes fragment files. + tag_content = _render_log() + + ctx.run('towncrier') + + ctx.run(f'git commit -am "Release {version}"') + + tag_content = tag_content.replace('"', '\\"') + ctx.run(f'git tag -a {version} -m "Version {version}\n\n{tag_content}"') + + ctx.run(f'python setup.py sdist bdist_wheel') + + dist_pattern = f'{PACKAGE_NAME.replace("-", "[-_]")}-*' + artifacts = list(ROOT.joinpath('dist').glob(dist_pattern)) + filename_display = '\n'.join(f' {a}' for a in artifacts) + print(f'[release] Will upload:\n{filename_display}') + try: + input('[release] Release ready. 
ENTER to upload, CTRL-C to abort: ') + except KeyboardInterrupt: + print('\nAborted!') + return + + arg_display = ' '.join(f'"{n}"' for n in artifacts) + ctx.run(f'twine upload --repository="{repo}" {arg_display}') + + version = _prebump(version, prebump) + _write_version(version) + + ctx.run(f'git commit -am "Prebump to {version}"') + + +@invoke.task +def build_docs(ctx): + _current_version = _read_version() + minor = [str(i) for i in _current_version.release[:2]] + docs_folder = (ROOT / 'docs').as_posix() + if not docs_folder.endswith('/'): + docs_folder = '{0}/'.format(docs_folder) + args = ["--ext-autodoc", "--ext-viewcode", "-o", docs_folder] + args.extend(["-A", "'Dan Ryan '"]) + args.extend(["-R", str(_current_version)]) + args.extend(["-V", ".".join(minor)]) + args.extend(["-e", "-M", "-F", f"src/{PACKAGE_NAME}"]) + print("Building docs...") + ctx.run("sphinx-apidoc {0}".format(" ".join(args))) + + +@invoke.task +def clean_mdchangelog(ctx): + changelog = ROOT / "CHANGELOG.md" + content = changelog.read_text() + content = re.sub( + r"([^\n]+)\n?\s+\[[\\]+(#\d+)\]\(https://github\.com/sarugaku/[\w\-]+/issues/\d+\)", + r"\1 \2", + content, + flags=re.MULTILINE, + ) + changelog.write_text(content) diff --git a/tests/fixtures/fake-package/tox.ini b/tests/fixtures/fake-package/tox.ini new file mode 100644 index 0000000000..2bc8e1d286 --- /dev/null +++ b/tests/fixtures/fake-package/tox.ini @@ -0,0 +1,37 @@ +[tox] +envlist = + docs, packaging, py27, py35, py36, py37, coverage-report + +[testenv] +passenv = CI GIT_SSL_CAINFO +setenv = + LC_ALL = en_US.UTF-8 +deps = + coverage + -e .[tests] +commands = coverage run --parallel -m pytest --timeout 300 [] +install_command = python -m pip install {opts} {packages} +usedevelop = True + +[testenv:coverage-report] +deps = coverage +skip_install = true +commands = + coverage combine + coverage report + +[testenv:docs] +deps = + -r{toxinidir}/docs/requirements.txt + -e .[tests] +commands = + sphinx-build -d {envtmpdir}/doctrees -b html docs docs/build/html + sphinx-build -d {envtmpdir}/doctrees -b man docs docs/build/man + +[testenv:packaging] +deps = + check-manifest + readme_renderer +commands = + check-manifest + python setup.py check -m -r -s diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index a682d0dda2..6d936a7faf 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -223,17 +223,17 @@ def test_check_unused(PipenvInstance): with PipenvInstance(chdir=True) as p: with open('__init__.py', 'w') as f: contents = """ -import fake_package +import click import records import flask """.strip() f.write(contents) - p.pipenv('install requests fake_package flask') + p.pipenv('install requests click flask') - assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'fake_package', 'flask']) + assert all(pkg in p.pipfile['packages'] for pkg in ['requests', 'click', 'flask']), p.pipfile["packages"] c = p.pipenv('check --unused .') - assert 'fake_package' not in c.out + assert 'click' not in c.out assert 'flask' not in c.out diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py index 4fb936b9dd..6d9f1368de 100644 --- a/tests/integration/test_install_basic.py +++ b/tests/integration/test_install_basic.py @@ -315,13 +315,13 @@ def test_skip_requirements_when_pipfile(PipenvInstance): contents = """ [packages] six = "*" -"fake_package" = "<0.12" +fake_package = "<0.12" """.strip() f.write(contents) c = p.pipenv("install") assert c.ok assert "fake_package" in 
p.pipfile["packages"] - assert "fake_package" in p.lockfile["default"] + assert "fake-package" in p.lockfile["default"] assert "six" in p.pipfile["packages"] assert "six" in p.lockfile["default"] assert "requests" not in p.pipfile["packages"] diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py index 379731a417..00f9c7897b 100644 --- a/tests/integration/test_install_markers.py +++ b/tests/integration/test_install_markers.py @@ -27,7 +27,7 @@ def test_package_environment_markers(PipenvInstance): c = p.pipenv('install') assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['fake_package'], p.lockfile["default"]["fake_package"] + assert 'markers' in p.lockfile['default']['fake-package'], p.lockfile["default"] c = p.pipenv('run python -c "import fake_package;"') assert c.return_code == 1 @@ -68,7 +68,7 @@ def test_specific_package_environment_markers(PipenvInstance): with open(p.pipfile_path, 'w') as f: contents = """ [packages] -fake_package = {version = "*", os_name = "== 'splashwear'"} +fake-package = {version = "*", os_name = "== 'splashwear'"} """.strip() f.write(contents) @@ -76,7 +76,7 @@ def test_specific_package_environment_markers(PipenvInstance): assert c.return_code == 0 assert 'Ignoring' in c.out - assert 'markers' in p.lockfile['default']['fake_package'] + assert 'markers' in p.lockfile['default']['fake-package'] c = p.pipenv('run python -c "import fake_package;"') assert c.return_code == 1 diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py index 58df07bd02..7a69e59581 100644 --- a/tests/integration/test_install_uri.py +++ b/tests/integration/test_install_uri.py @@ -166,8 +166,8 @@ def test_install_editable_git_tag(PipenvInstance_NoPyPI): # ! 
This is failing @pytest.mark.index @pytest.mark.install @pytest.mark.needs_internet -def test_install_named_index_alias(PipenvInstance): - with PipenvInstance() as p: +def test_install_named_index_alias(PipenvInstance_NoPyPI): + with PipenvInstance_NoPyPI() as p: with open(p.pipfile_path, "w") as f: contents = """ [[source]] diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py index 44f68bfe05..a69d580e64 100644 --- a/tests/integration/test_lock.py +++ b/tests/integration/test_lock.py @@ -289,8 +289,8 @@ def test_complex_lock_deep_extras(PipenvInstance): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.skip_lock @pytest.mark.needs_internet -def test_private_index_skip_lock(PipenvInstance): - with PipenvInstance() as p: +def test_private_index_skip_lock(PipenvInstance_NoPyPI): + with PipenvInstance_NoPyPI() as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] @@ -317,9 +317,9 @@ def test_private_index_skip_lock(PipenvInstance): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.requirements @pytest.mark.needs_internet -def test_private_index_lock_requirements(PipenvInstance): +def test_private_index_lock_requirements(PipenvInstance_NoPyPI): # Don't use the local fake pypi - with PipenvInstance() as p: + with PipenvInstance_NoPyPI() as p: with open(p.pipfile_path, 'w') as f: contents = """ [[source]] @@ -350,9 +350,9 @@ def test_private_index_lock_requirements(PipenvInstance): @pytest.mark.install # private indexes need to be uncached for resolution @pytest.mark.requirements @pytest.mark.needs_internet -def test_private_index_mirror_lock_requirements(PipenvInstance): +def test_private_index_mirror_lock_requirements(PipenvInstance_NoPyPI): # Don't use the local fake pypi - with temp_environ(), PipenvInstance(chdir=True) as p: + with temp_environ(), PipenvInstance_NoPyPI(chdir=True) as p: # Using pypi.python.org as pipenv-test-public-package is not # included in the local pypi mirror mirror_url = os.environ.pop('PIPENV_TEST_INDEX', "https://pypi.kennethreitz.org/simple") @@ -371,7 +371,7 @@ def test_private_index_mirror_lock_requirements(PipenvInstance): [packages] six = {version = "*", index = "testpypi"} -requests = "*" +fake-package = "*" """.strip() f.write(contents) c = p.pipenv('install --pypi-mirror {0}'.format(mirror_url)) diff --git a/tests/pypi b/tests/pypi index 2c4b6de4d8..0801b3aecf 160000 --- a/tests/pypi +++ b/tests/pypi @@ -1 +1 @@ -Subproject commit 2c4b6de4d88d7d5732bdf0c9345ad10f8336abd3 +Subproject commit 0801b3aecfbe8385ea879860fb36477a13a4278b diff --git a/tests/pytest-pypi/pytest_pypi/app.py b/tests/pytest-pypi/pytest_pypi/app.py index fa494ea8cd..607d219933 100644 --- a/tests/pytest-pypi/pytest_pypi/app.py +++ b/tests/pytest-pypi/pytest_pypi/app.py @@ -3,17 +3,31 @@ import io import sys -import requests -from flask import Flask, redirect, abort, render_template, send_file, jsonify +if sys.version_info[:2] >= (3, 0): + from xmlrpc.client import ServerProxy +else: + from xmlrpclib import ServerProxy + from zipfile import is_zipfile from tarfile import is_tarfile +import requests +from flask import Flask, redirect, abort, render_template, send_file, jsonify + + app = Flask(__name__) session = requests.Session() packages = {} ARTIFACTS = {} + +def get_pypi_package_names(): + client = ServerProxy("https://pypi.org/pypi") + pypi_packages = set(client.list_packages()) + return pypi_packages + + class Package(object): """Package represents a collection of releases 
from one or more directories""" @@ -107,16 +121,22 @@ def prepare_packages(path): if not (os.path.exists(path) and os.path.isdir(path)): raise ValueError("{} is not a directory!".format(path)) for root, dirs, files in os.walk(path): + if all([setup_file in list(files) for setup_file in ("setup.py", "setup.cfg")]): + continue for file in files: if not file.startswith('.') and not file.endswith('.json'): package_name = os.path.basename(root) if package_name and package_name == "fixtures": prepare_fixtures(root) continue + package_name = package_name.replace("_", "-") if package_name not in packages: packages[package_name] = Package(package_name) packages[package_name].add_release(os.path.join(root, file)) + remaining = get_pypi_package_names() - set(list(packages.keys())) + for pypi_pkg in remaining: + packages[pypi_pkg] = Package(pypi_pkg) @app.route('/') @@ -136,10 +156,18 @@ def artifacts(): @app.route('/simple//') def simple_package(package): - if package in packages: + if package in packages and packages[package].releases: return render_template('package.html', package=packages[package]) else: - abort(404) + try: + r = requests.get("https://pypi.org/simple/{0}".format(package)) + r.raise_for_status() + except Exception: + abort(404) + else: + return render_template( + 'package_pypi.html', package_contents=r.text + ) @app.route('/artifacts//') diff --git a/tests/pytest-pypi/pytest_pypi/templates/package_pypi.html b/tests/pytest-pypi/pytest_pypi/templates/package_pypi.html new file mode 100644 index 0000000000..217d8aa0e8 --- /dev/null +++ b/tests/pytest-pypi/pytest_pypi/templates/package_pypi.html @@ -0,0 +1,4 @@ + +{% autoescape false %} + {{ package_contents }} +{% endautoescape %}
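
Editorial note on the requirementslib changes above: the patched `get_line_instance()` drops hashes for editable and VCS requirements, normalizes double quotes inside markers to single quotes, and wraps the line in quotes after the `-e` flag whenever markers are present. The sketch below is a simplified, hypothetical restatement of that rule (the function name and signature are not part of requirementslib's API) so readers can see the resulting pip-style line format.

```python
# Simplified sketch of the quoting rule introduced in get_line_instance();
# format_editable_line() is illustrative only, not requirementslib API.
def format_editable_line(line_part, markers=None):
    line = line_part[3:] if line_part.startswith("-e ") else line_part
    if markers:
        # Markers are normalized to single quotes and kept inside the quoted line.
        line = '{0}; {1}'.format(line, markers.replace('"', "'"))
        line = '"{0}"'.format(line)
    return "-e {0}".format(line)


# Example: an editable VCS dependency with an environment marker becomes
#   -e "git+https://github.com/sarugaku/fake_package.git#egg=fake_package; os_name == 'splashwear'"
print(format_editable_line(
    "-e git+https://github.com/sarugaku/fake_package.git#egg=fake_package",
    markers="os_name == 'splashwear'",
))
```

This is what allows an editable VCS entry with markers to survive the round trip through the lockfile without pip mis-parsing the marker portion.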
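
A second note, on the `pipenv/environment.py` changes: the new `Environment.build_command()` assembles a Python one-liner that the *target* interpreter executes to dump its own install paths into a tracked JSON temp file, rather than guessing those paths from the host interpreter's `sysconfig`. Expanded into an ordinary script, the generated program looks roughly like the following (the distutils calls mirror the 2019-era patch; the output filename is a placeholder for the temp file `get_paths()` creates).

```python
# Roughly what build_command(python_lib=True, python_inc=True, scripts=True,
# py_version=True) generates, written out as a normal script.
import distutils.sysconfig
import io
import json
import sysconfig

paths = {
    "purelib": distutils.sysconfig.get_python_lib(plat_specific=0),
    "platlib": distutils.sysconfig.get_python_lib(plat_specific=1),
    "stdlib": sysconfig.get_path("stdlib"),
    "platstdlib": sysconfig.get_path("platstdlib"),
    "include": distutils.sysconfig.get_python_inc(plat_specific=0),
    "platinclude": distutils.sysconfig.get_python_inc(plat_specific=1),
    "scripts": sysconfig.get_path("scripts"),
    "py_version_short": distutils.sysconfig.get_python_version(),
}
# "paths.json" stands in for the tracked temp file path substituted by get_paths().
with io.open("paths.json", "w") as fh:
    fh.write(u"{0}".format(json.dumps(paths)))
```

Writing to a file instead of stdout is what lets `get_paths()`, `get_lib_paths()`, and `get_include_path()` distinguish real path data from any noise the subprocess prints.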