From 7b7803148df3501cecb86a2f6607583421083d29 Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Wed, 22 Nov 2023 10:11:50 +0800 Subject: [PATCH] feat: export cross-platform requirements (#2418) --- news/2418.feature.md | 1 + pdm.lock | 59 ++--- pyproject.toml | 1 + src/pdm/cli/actions.py | 7 +- src/pdm/cli/commands/export.py | 7 +- src/pdm/cli/commands/run.py | 2 +- src/pdm/cli/utils.py | 7 +- src/pdm/formats/pipfile.py | 6 +- src/pdm/formats/poetry.py | 6 +- src/pdm/formats/requirements.py | 4 +- src/pdm/models/candidates.py | 6 +- src/pdm/models/markers.py | 230 ++++++++---------- src/pdm/models/repositories.py | 6 +- src/pdm/models/requirements.py | 38 +-- src/pdm/models/setup.py | 4 +- src/pdm/pytest.py | 2 +- src/pdm/resolver/core.py | 15 +- src/pdm/resolver/graph.py | 111 +++++++++ .../requirements.txt | 4 +- tests/models/test_candidates.py | 14 -- tests/models/test_marker.py | 50 +--- tests/models/test_requirements.py | 32 ++- tests/resolver/test_resolve.py | 39 ++- tests/test_formats.py | 5 +- 24 files changed, 359 insertions(+), 297 deletions(-) create mode 100644 news/2418.feature.md create mode 100644 src/pdm/resolver/graph.py diff --git a/news/2418.feature.md b/news/2418.feature.md new file mode 100644 index 0000000000..c36c4efc07 --- /dev/null +++ b/news/2418.feature.md @@ -0,0 +1 @@ +When exporting requirements, record the environment markers from all parents for each requirement. This allows the exported requirements to work on different platforms and Python versions. diff --git a/pdm.lock b/pdm.lock index 7c9b15f7b3..820ac99607 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "all", "doc", "pytest", "test", "tox", "workflow"] strategy = ["cross_platform"] lock_version = "4.4" -content_hash = "sha256:82e53197b4afcc358fdc567b55686a65c13652ec746879a4ba708ecb251234e5" +content_hash = "sha256:499eec45ab2d719e55dfe76faab2a2756e493d1a9baac105317f7453f32fd417" [[package]] name = "arpeggio" @@ -23,7 +23,6 @@ requires_python = ">=3.6" summary = "Better dates & times for Python" dependencies = [ "python-dateutil>=2.7.0", - "typing-extensions; python_version < \"3.8\"", ] files = [ {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, @@ -35,24 +34,11 @@ name = "attrs" version = "23.1.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" -dependencies = [ - "importlib-metadata; python_version < \"3.8\"", -] files = [ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] -[[package]] -name = "backports-cached-property" -version = "1.0.2" -requires_python = ">=3.6.0" -summary = "cached_property() - computed once per instance, cached as attribute" -files = [ - {file = "backports.cached-property-1.0.2.tar.gz", hash = "sha256:9306f9eed6ec55fd156ace6bc1094e2c86fae5fb2bf07b6a9c00745c656e75dd"}, - {file = "backports.cached_property-1.0.2-py3-none-any.whl", hash = "sha256:baeb28e1cd619a3c9ab8941431fe34e8490861fb998c6c4590693d50171db0cc"}, -] - [[package]] name = "binaryornot" version = "0.4.4" @@ -104,15 +90,6 @@ files = [ {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"}, ] -[[package]] -name = "cached-property" -version = "1.5.2" -summary = "A decorator for caching properties in classes." 
-files = [ - {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, - {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, -] - [[package]] name = "cachetools" version = "5.3.1" @@ -307,7 +284,6 @@ requires_python = ">=3.7" summary = "Composable command line interface toolkit" dependencies = [ "colorama; platform_system == \"Windows\"", - "importlib-metadata; python_version < \"3.8\"", ] files = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, @@ -360,12 +336,10 @@ version = "8.0.0" requires_python = ">=3.7,<4.0" summary = "A library for rendering project templates." dependencies = [ - "backports-cached-property>=1.0.0; python_version < \"3.8\"", "colorama>=0.4.3", "decorator>=5.1.1", "dunamai>=1.7.0", "funcy>=1.17", - "importlib-metadata<7.0,>=3.4; python_version < \"3.8\"", "jinja2-ansible-filters>=1.3.1", "jinja2>=3.1.1", "packaging>=23.0", @@ -376,7 +350,6 @@ dependencies = [ "pyyaml-include>=1.2", "pyyaml>=5.3.1", "questionary>=1.8.1", - "typing-extensions<5.0.0,>=3.7.4; python_version < \"3.8\"", ] files = [ {file = "copier-8.0.0-py3-none-any.whl", hash = "sha256:adad3b4c8101584ea2c2e2580e34f37e791940ae103e2480ae9f509d32e6cb0b"}, @@ -564,6 +537,19 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "dep-logic" +version = "0.0.2" +requires_python = ">=3.8" +summary = "Python dependency specifications supporting logical operations" +dependencies = [ + "packaging>=22", +] +files = [ + {file = "dep_logic-0.0.2-py3-none-any.whl", hash = "sha256:693869adfae132d99e39b0bf24aae16b96c9bca839bf8d02eaaa16719507a0d7"}, + {file = "dep_logic-0.0.2.tar.gz", hash = "sha256:7fcbda5908c3caf800dc07638c3a3d376ec886fb12691ac39ac0106b41a47b28"}, +] + [[package]] name = "distlib" version = "0.3.7" @@ -579,7 +565,6 @@ version = "1.17.0" requires_python = ">=3.5,<4.0" summary = "Dynamic version generation" dependencies = [ - "importlib-metadata>=1.6.0; python_version < \"3.8\"", "packaging>=20.9", ] files = [ @@ -657,7 +642,6 @@ version = "0.29.0" requires_python = ">=3.7" summary = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." dependencies = [ - "cached-property; python_version < \"3.8\"", "colorama>=0.4", ] files = [ @@ -681,7 +665,6 @@ version = "6.7.0" requires_python = ">=3.7" summary = "Read metadata from Python packages" dependencies = [ - "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", ] files = [ @@ -844,7 +827,6 @@ requires_python = ">=3.7" summary = "Python port of markdown-it. Markdown parsing, done right!" dependencies = [ "mdurl~=0.1", - "typing-extensions>=3.7.4; python_version < \"3.8\"", ] files = [ {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, @@ -963,7 +945,6 @@ dependencies = [ "platformdirs>=2.2.0", "pyyaml-env-tag>=0.1", "pyyaml>=5.1", - "typing-extensions>=3.10; python_version < \"3.8\"", "watchdog>=2.0", ] files = [ @@ -1207,9 +1188,6 @@ name = "platformdirs" version = "3.10.0" requires_python = ">=3.7" summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-dependencies = [ - "typing-extensions>=4.7.1; python_version < \"3.8\"", -] files = [ {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, @@ -1220,9 +1198,6 @@ name = "pluggy" version = "1.2.0" requires_python = ">=3.7" summary = "plugin and hook calling mechanisms for python" -dependencies = [ - "importlib-metadata>=0.12; python_version < \"3.8\"", -] files = [ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, @@ -1380,7 +1355,6 @@ summary = "pytest: simple powerful testing with Python" dependencies = [ "colorama; sys_platform == \"win32\"", "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", "pluggy<2.0,>=0.12", @@ -1437,7 +1411,6 @@ version = "12.0" requires_python = ">=3.7" summary = "pytest plugin to re-run tests to eliminate flaky failures" dependencies = [ - "importlib-metadata>=1; python_version < \"3.8\"", "packaging>=17.1", "pytest>=6.2", ] @@ -1868,13 +1841,11 @@ dependencies = [ "chardet>=5.1", "colorama>=0.4.6", "filelock>=3.12.2", - "importlib-metadata>=6.7; python_version < \"3.8\"", "packaging>=23.1", "platformdirs>=3.8", "pluggy>=1.2", "pyproject-api>=1.5.2", "tomli>=2.0.1; python_version < \"3.11\"", - "typing-extensions>=4.6.3; python_version < \"3.8\"", "virtualenv>=20.23.1", ] files = [ @@ -1922,7 +1893,6 @@ version = "0.12.1" requires_python = ">=3.7" summary = "A utility to fetch and download python packages" dependencies = [ - "cached-property>=1.5.2; python_version < \"3.8\"", "packaging>=20", "requests>=2.25", ] @@ -1958,7 +1928,6 @@ summary = "Virtual Python Environment builder" dependencies = [ "distlib<1,>=0.3.7", "filelock<4,>=3.12.2", - "importlib-metadata>=6.6; python_version < \"3.8\"", "platformdirs<4,>=3.9.1", ] files = [ diff --git a/pyproject.toml b/pyproject.toml index 9e6f5553b4..163314c165 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,7 @@ dependencies = [ "pyproject-hooks", "requests-toolbelt", "unearth>=0.12.1", + "dep-logic>=0.0.2,<1.0", "findpython>=0.4.0,<1.0.0a0", "tomlkit>=0.11.1,<1", "shellingham>=1.3.2", diff --git a/src/pdm/cli/actions.py b/src/pdm/cli/actions.py index 43c57b219f..c28ac1f0f4 100644 --- a/src/pdm/cli/actions.py +++ b/src/pdm/cli/actions.py @@ -128,7 +128,9 @@ def do_lock( return mapping -def resolve_candidates_from_lockfile(project: Project, requirements: Iterable[Requirement]) -> dict[str, Candidate]: +def resolve_candidates_from_lockfile( + project: Project, requirements: Iterable[Requirement], cross_platform: bool = False +) -> dict[str, Candidate]: ui = project.core.ui resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"]) reqs = [ @@ -138,12 +140,15 @@ def resolve_candidates_from_lockfile(project: Project, requirements: Iterable[Re with ui.open_spinner("Resolving packages from lockfile...") as spinner: reporter = BaseReporter() provider = project.get_provider(for_install=True) + if cross_platform: + provider.repository.ignore_compatibility = True resolver: Resolver = project.core.resolver_class(provider, reporter) mapping, *_ = resolve( resolver, reqs, project.environment.python_requires, resolve_max_rounds, + 
record_markers=cross_platform, ) spinner.update("Fetching hashes for resolved packages...") fetch_hashes(provider.repository, mapping) diff --git a/src/pdm/cli/commands/export.py b/src/pdm/cli/commands/export.py index ecf07b216f..fb0e7cea4f 100644 --- a/src/pdm/cli/commands/export.py +++ b/src/pdm/cli/commands/export.py @@ -60,11 +60,8 @@ def handle(self, project: Project, options: argparse.Namespace) -> None: else: if not project.lockfile.exists(): raise PdmUsageError("No lockfile found, please run `pdm lock` first.") - project.core.ui.warn( - "The exported requirements file is no longer cross-platform. " - "Using it on other platforms may cause unexpected result.", - ) - candidates = resolve_candidates_from_lockfile(project, requirements.values()) + + candidates = resolve_candidates_from_lockfile(project, requirements.values(), cross_platform=True) # Remove candidates with [extras] because the bare candidates are already # included packages = (candidate for candidate in candidates.values() if not candidate.req.extras) diff --git a/src/pdm/cli/commands/run.py b/src/pdm/cli/commands/run.py index e727e9b1af..65add4ac1e 100644 --- a/src/pdm/cli/commands/run.py +++ b/src/pdm/cli/commands/run.py @@ -65,7 +65,7 @@ def _interpolate_pdm(script: str) -> str: executable_path = Path(sys.executable) def replace(m: re.Match[str]) -> str: - return sh_join([executable_path.as_posix(), "-m", "pdm"]) + return shlex.join([executable_path.as_posix(), "-m", "pdm"]) interpolated = RE_PDM_PLACEHOLDER.sub(replace, script) return interpolated diff --git a/src/pdm/cli/utils.py b/src/pdm/cli/utils.py index 64d03a3698..726db78fef 100644 --- a/src/pdm/cli/utils.py +++ b/src/pdm/cli/utils.py @@ -200,12 +200,7 @@ def add_package(key: str, dist: Distribution | None) -> Package: if dist: requirements = ( parse_requirement(r) - for r in filter_requirements_with_extras( - cast(str, dist.metadata["Name"]), - dist.requires or [], - extras, - include_default=True, - ) + for r in filter_requirements_with_extras(dist.requires or [], extras, include_default=True) ) for req in requirements: if not req.marker or req.marker.evaluate(marker_env): diff --git a/src/pdm/formats/pipfile.py b/src/pdm/formats/pipfile.py index f119ee9fda..1272e75ed1 100644 --- a/src/pdm/formats/pipfile.py +++ b/src/pdm/formats/pipfile.py @@ -9,7 +9,7 @@ from pdm.compat import tomllib from pdm.formats.base import make_array -from pdm.models.markers import Marker +from pdm.models.markers import Marker, get_marker from pdm.models.requirements import FileRequirement, Requirement if TYPE_CHECKING: @@ -27,10 +27,10 @@ def convert_pipfile_requirement(name: str, req: RequirementDict, backend: BuildB if isinstance(req, dict): markers: list[Marker] = [] if "markers" in req: - markers.append(Marker(req["markers"])) # type: ignore[arg-type] + markers.append(get_marker(req["markers"])) # type: ignore[arg-type] for key in MARKER_KEYS: if key in req: - marker = Marker(f"{key}{req[key]}") + marker = get_marker(f"{key}{req[key]}") markers.append(marker) del req[key] diff --git a/src/pdm/formats/poetry.py b/src/pdm/formats/poetry.py index 1dc8d2fa33..c05463e53d 100644 --- a/src/pdm/formats/poetry.py +++ b/src/pdm/formats/poetry.py @@ -16,7 +16,7 @@ make_inline_table, parse_name_email, ) -from pdm.models.markers import Marker +from pdm.models.markers import Marker, get_marker from pdm.models.requirements import Requirement from pdm.models.specifiers import PySpecSet from pdm.utils import cd @@ -80,9 +80,9 @@ def _convert_req(name: str, req_dict: RequirementDict | 
list[RequirementDict]) - req_dict["version"] = _convert_specifier(str(req_dict["version"])) markers: list[Marker] = [] if "markers" in req_dict: - markers.append(Marker(req_dict.pop("markers"))) # type: ignore[arg-type] + markers.append(get_marker(req_dict.pop("markers"))) # type: ignore[arg-type] if "python" in req_dict: - markers.append(Marker(_convert_python(str(req_dict.pop("python"))).as_marker_string())) + markers.append(get_marker(_convert_python(str(req_dict.pop("python"))).as_marker_string())) if markers: req_dict["marker"] = str(functools.reduce(operator.and_, markers)).replace('"', "'") if "rev" in req_dict or "branch" in req_dict or "tag" in req_dict: diff --git a/src/pdm/formats/requirements.py b/src/pdm/formats/requirements.py index a14aacfbe6..65cad82b6c 100644 --- a/src/pdm/formats/requirements.py +++ b/src/pdm/formats/requirements.py @@ -1,7 +1,6 @@ from __future__ import annotations import argparse -import dataclasses import hashlib import shlex import urllib.parse @@ -10,7 +9,6 @@ from pdm.formats.base import make_array from pdm.models.requirements import FileRequirement, Requirement, parse_requirement -from pdm.models.specifiers import get_specifier from pdm.utils import expand_env_vars_in_auth if TYPE_CHECKING: @@ -189,7 +187,7 @@ def export( collected_req: set[str] = set() for candidate in sorted(candidates, key=lambda x: x.identify()): # type: ignore[attr-defined] if isinstance(candidate, Candidate): - req = dataclasses.replace(candidate.req, specifier=get_specifier(f"=={candidate.version}"), marker=None) + req = candidate.req.as_pinned_version(candidate.version) else: assert isinstance(candidate, Requirement) req = candidate diff --git a/src/pdm/models/candidates.py b/src/pdm/models/candidates.py index 1e426c174d..44315c635e 100644 --- a/src/pdm/models/candidates.py +++ b/src/pdm/models/candidates.py @@ -611,11 +611,7 @@ def metadata(self) -> im.Distribution: def get_dependencies_from_metadata(self) -> list[str]: """Get the dependencies of a candidate from metadata.""" extras = self.req.extras or () - return filter_requirements_with_extras( - self.req.project_name, # type: ignore[arg-type] - self.metadata.requires or [], - extras, - ) + return filter_requirements_with_extras(self.metadata.requires or [], extras) def should_cache(self) -> bool: """Determine whether to cache the dependencies and built wheel.""" diff --git a/src/pdm/models/markers.py b/src/pdm/models/markers.py index 2be4a01a87..107535c6d6 100644 --- a/src/pdm/models/markers.py +++ b/src/pdm/models/markers.py @@ -1,154 +1,126 @@ from __future__ import annotations -import copy -import itertools import operator +from dataclasses import dataclass from functools import reduce -from typing import Any - +from typing import Any, overload + +from dep_logic.markers import ( + BaseMarker, + InvalidMarker, + MarkerExpression, + MarkerUnion, + MultiMarker, + from_pkg_marker, + parse_marker, +) from packaging.markers import Marker as PackageMarker +from pdm.exceptions import RequirementError from pdm.models.specifiers import PySpecSet -from pdm.utils import join_list_with -class Marker(PackageMarker): - """A subclass of Marker that supports union and intersection merging.""" +@dataclass(frozen=True, unsafe_hash=True, repr=False) +class Marker: + inner: BaseMarker + + def __and__(self, other: Any) -> Marker: + if not isinstance(other, Marker): + return NotImplemented + return type(self)(self.inner & other.inner) + + def __or__(self, other: Any) -> Marker: + if not isinstance(other, Marker): + return NotImplemented + 
return type(self)(self.inner | other.inner)
+
+    def is_any(self) -> bool:
+        return self.inner.is_any()
+
+    def is_empty(self) -> bool:
+        return self.inner.is_empty()
+
+    def __str__(self) -> str:
+        return str(self.inner)
 
-    def copy(self) -> Marker:
-        inst = self.__class__('os_name == "nt"')
-        inst._markers = copy.deepcopy(self._markers)
-        return inst
+    def __repr__(self) -> str:
+        return f"<Marker {self.inner}>"
 
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, PackageMarker):
-            return False
-        return str(self) == str(other)
+    def evaluate(self, environment: dict[str, Any] | None = None) -> bool:
+        return self.inner.evaluate(environment)
 
-    def split_pyspec(self) -> tuple[Marker | None, PySpecSet]:
+    def split_pyspec(self) -> tuple[Marker, PySpecSet]:
         """Split `python_version` and `python_full_version` from marker string"""
-        if _only_contains_python_keys(self._markers):
-            return None, _build_pyspec_from_marker(self._markers)
-        if "or" in self._markers:
-            return self.copy(), PySpecSet()
-        py_markers = [marker for marker in self._markers if marker != "and" and _only_contains_python_keys(marker)]
-        rest = [marker for marker in self._markers if marker != "and" and marker not in py_markers]
-        new_markers = join_list_with(rest, "and")
-        if not new_markers:
-            marker = None
-        else:
-            marker = self.copy()
-            marker._markers = new_markers
-        return marker, _build_pyspec_from_marker(join_list_with(py_markers, "and"))
-
-
-def get_marker(marker: PackageMarker | Marker | None) -> Marker | None:
-    return Marker(str(marker)) if marker else None
-
-
-def split_marker_extras(marker: str) -> tuple[set[str], str]:
-    """An element can be stripped from the marker only if all parts are connected
-    with `and` operator. The rest part are returned as a string or `None` if all are
-    stripped.
-    """
-
-    def extract_extras(submarker: tuple | list) -> set[str]:
-        if isinstance(submarker, tuple):
-            if submarker[0].value == "extra":
-                if submarker[1].value == "==":
-                    return {submarker[2].value}
-                elif submarker[1].value == "in":
-                    return {v.strip() for v in submarker[2].value.split(",")}
-                else:
-                    return set()
-            else:
-                return set()
-        else:
-            if "and" in submarker:
-                return set()
-            pure_extras = [extract_extras(m) for m in submarker if m != "or"]
-            if all(pure_extras):
-                return set(itertools.chain.from_iterable(pure_extras))
-            return set()
-
-    if not marker:
-        return set(), marker
-    new_marker = PackageMarker(marker)
-    submarkers = PackageMarker(marker)._markers
-    if "or" in submarkers:
-        extras = extract_extras(submarkers)
-        if extras:
-            return extras, ""
-        return set(), marker
-
-    extras = set()
-    submarkers_no_extras: list[tuple | list] = []
-    # Below this point the submarkers are connected with 'and'
-    for submarker in submarkers:
-        if submarker == "and":
-            continue
-        new_extras = extract_extras(submarker)
-        if new_extras:
-            if extras:
-                # extras are not allowed to appear in more than one parts
-                return set(), marker
-            extras.update(new_extras)
-        else:
-            submarkers_no_extras.append(submarker)
+        python_marker = self.inner.only("python_version", "python_full_version")
+        if python_marker.is_any():
+            return self, PySpecSet()
+        new_marker = type(self)(self.inner.exclude("python_version").exclude("python_full_version"))
+        return new_marker, _build_pyspec_from_marker(python_marker)
+
+    def split_extras(self) -> tuple[Marker, Marker]:
+        """Split the `extra` terms out of the marker.
+
+        Return a tuple of (rest, extras): the marker with all `extra` terms
+        removed, and a marker containing only the `extra` terms.
+ """ + return type(self)(self.inner.without_extras()), type(self)(self.inner.only("extra")) + + +@overload +def get_marker(marker: None) -> None: + ... - if not submarkers_no_extras: - return extras, "" - new_marker._markers = join_list_with(submarkers_no_extras, "and") - return extras, str(new_marker) +@overload +def get_marker(marker: PackageMarker | Marker | str) -> Marker: + ... -def _only_contains_python_keys(markers: list[Any]) -> bool: - if isinstance(markers, tuple): - return markers[0].value in ("python_version", "python_full_version") - for marker in markers: - if marker in ("and", "or"): - continue - if not _only_contains_python_keys(marker): - return False - return True +def get_marker(marker: PackageMarker | Marker | str | None) -> Marker | None: + if marker is None: + return None + if isinstance(marker, Marker): + return marker + elif isinstance(marker, PackageMarker): + return Marker(from_pkg_marker(marker)) + try: + return Marker(parse_marker(marker)) + except InvalidMarker as e: + raise RequirementError(f"Invalid marker {marker}: {e}") from e -def _build_pyspec_from_marker(markers: list[Any]) -> PySpecSet: +def _build_pyspec_from_marker(marker: BaseMarker) -> PySpecSet: def split_version(version: str) -> list[str]: if "," in version: return [v.strip() for v in version.split(",")] return version.split() - groups = [PySpecSet()] - for marker in markers: - if isinstance(marker, list): - # It is a submarker - groups[-1] = groups[-1] & _build_pyspec_from_marker(marker) - elif isinstance(marker, tuple): - key, op, version = (i.value for i in marker) - if key == "python_version": - if op == ">": - int_versions = [int(ver) for ver in version.split(".")] - if len(int_versions) < 2: - int_versions.append(0) - int_versions[-1] += 1 - version = ".".join(str(v) for v in int_versions) - op = ">=" - elif op in ("==", "!="): - if len(version.split(".")) < 3: - version += ".*" - elif op in ("in", "not in"): - version = " ".join(v + ".*" for v in split_version(version)) - if op == "in": - pyspec = reduce(operator.or_, (PySpecSet(f"=={v}") for v in split_version(version))) - elif op == "not in": - pyspec = reduce(operator.and_, (PySpecSet(f"!={v}") for v in split_version(version))) - else: - pyspec = PySpecSet(f"{op}{version}") - groups[-1] = groups[-1] & pyspec + if isinstance(marker, MarkerExpression): + name = marker.name + op = marker.op + version = marker.value + if name == "python_version": + if op == ">": + int_versions = [int(ver) for ver in version.split(".")] + if len(int_versions) < 2: + int_versions.append(0) + int_versions[-1] += 1 + version = ".".join(str(v) for v in int_versions) + op = ">=" + elif op in ("==", "!="): + if len(version.split(".")) < 3: + version += ".*" + elif op in ("in", "not in"): + version = " ".join(v + ".*" for v in split_version(version)) + if op == "in": + pyspec = reduce(operator.or_, (PySpecSet(f"=={v}") for v in split_version(version))) + elif op == "not in": + pyspec = reduce(operator.and_, (PySpecSet(f"!={v}") for v in split_version(version))) else: - assert marker in ("and", "or") - if marker == "or": - groups.append(PySpecSet()) - return reduce(operator.or_, groups) + pyspec = PySpecSet(f"{op}{version}") + return pyspec + elif isinstance(marker, MultiMarker): + return reduce(operator.and_, (_build_pyspec_from_marker(m) for m in marker.markers)) + elif isinstance(marker, MarkerUnion): + return reduce(operator.or_, (_build_pyspec_from_marker(m) for m in marker.markers)) + else: # pragma: no cover + raise TypeError(f"Unsupported marker type: 
{type(marker)}") diff --git a/src/pdm/models/repositories.py b/src/pdm/models/repositories.py index c6be61d8f1..13f1ea2308 100644 --- a/src/pdm/models/repositories.py +++ b/src/pdm/models/repositories.py @@ -416,11 +416,7 @@ def _get_dependencies_from_json(self, candidate: Candidate) -> CandidateInfo: requirement_lines = info["requires_dist"] or [] except KeyError: requirement_lines = info["requires"] or [] - requirements = filter_requirements_with_extras( - cast(str, candidate.req.project_name), - requirement_lines, - candidate.req.extras or (), - ) + requirements = filter_requirements_with_extras(requirement_lines, candidate.req.extras or ()) return requirements, requires_python, summary raise CandidateInfoNotFound(candidate) diff --git a/src/pdm/models/requirements.py b/src/pdm/models/requirements.py index f034072835..242c59392e 100644 --- a/src/pdm/models/requirements.py +++ b/src/pdm/models/requirements.py @@ -9,20 +9,18 @@ import re import secrets import urllib.parse as urlparse -import warnings from pathlib import Path from typing import TYPE_CHECKING, Any, Sequence, TypeVar, cast -from packaging.markers import InvalidMarker from packaging.requirements import InvalidRequirement from packaging.requirements import Requirement as PackageRequirement from packaging.specifiers import SpecifierSet from packaging.utils import parse_sdist_filename, parse_wheel_filename from pdm.compat import Distribution -from pdm.exceptions import ExtrasWarning, RequirementError +from pdm.exceptions import RequirementError from pdm.models.backends import BuildBackend, get_relative_path -from pdm.models.markers import Marker, get_marker, split_marker_extras +from pdm.models.markers import Marker, get_marker from pdm.models.setup import Setup from pdm.models.specifiers import PySpecSet, fix_legacy_specifier, get_specifier from pdm.utils import ( @@ -142,10 +140,7 @@ def __str__(self) -> str: @classmethod def create(cls: type[T], **kwargs: Any) -> T: if "marker" in kwargs: - try: - kwargs["marker"] = get_marker(kwargs["marker"]) - except InvalidMarker as e: - raise RequirementError(f"Invalid marker: {e}") from None + kwargs["marker"] = get_marker(kwargs["marker"]) if "extras" in kwargs and isinstance(kwargs["extras"], str): kwargs["extras"] = tuple(e.strip() for e in kwargs["extras"][1:-1].split(",")) version = kwargs.pop("version", "") @@ -435,34 +430,27 @@ def _parse_url(self) -> None: def filter_requirements_with_extras( - project_name: str, - requirement_lines: list[str], - extras: Sequence[str], - include_default: bool = False, + requirement_lines: list[str], extras: Sequence[str], include_default: bool = False ) -> list[str]: """Filter the requirements with extras. If extras are given, return those with matching extra markers. Otherwise, return those without extra markers. 
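+    If include_default is True, requirements without extra markers are also
+    returned when extras are requested.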
""" - extras = [normalize_name(e) for e in extras] result: list[str] = [] - extras_in_meta: set[str] = set() for req in requirement_lines: _r = parse_requirement(req) + req_extras = get_marker("") if _r.marker: - req_extras, rest = split_marker_extras(str(_r.marker)) - if req_extras: - extras_in_meta.update(req_extras) - _r.marker = Marker(rest) if rest else None - else: - req_extras = set() - if req_extras and not req_extras.isdisjoint(extras) or not req_extras and (include_default or not extras): + rest, req_extras = _r.marker.split_extras() + _r.marker = rest if not rest.is_any() else None + if not req_extras.evaluate({"extra": extras or ""}): + continue + # Add to the requirements if: + # The requirement has no extras while requested extras are empty or include_default is True, or + # The requirement has extras, in which case the `evaluate()` test must have been passed. + if not req_extras.is_any() or include_default or not extras: result.append(_r.as_line()) - extras_not_found = [e for e in extras if e not in extras_in_meta] - if extras_not_found: - warnings.warn(ExtrasWarning(project_name, extras_not_found), stacklevel=2) - return result diff --git a/src/pdm/models/setup.py b/src/pdm/models/setup.py index cf9235e9a3..dfa491b71c 100644 --- a/src/pdm/models/setup.py +++ b/src/pdm/models/setup.py @@ -410,7 +410,7 @@ def metadata(self) -> dict[str, Any]: # type: ignore[override] @property def requires(self) -> list[str] | None: - from pdm.models.markers import Marker + from pdm.models.markers import get_marker from pdm.models.requirements import parse_requirement result = self._data.install_requires @@ -426,6 +426,6 @@ def requires(self) -> list[str] | None: new_marker = f"{old_marker} and {extra_marker}" else: new_marker = extra_marker - parsed.marker = Marker(new_marker) + parsed.marker = get_marker(new_marker) result.append(parsed.as_line()) return result diff --git a/src/pdm/pytest.py b/src/pdm/pytest.py index 9e78078b0d..b52f2bedf6 100644 --- a/src/pdm/pytest.py +++ b/src/pdm/pytest.py @@ -179,7 +179,7 @@ def _get_dependencies_from_fixture(self, candidate: Candidate) -> tuple[list[str except KeyError: raise CandidateInfoNotFound(candidate) from None deps = pypi_data.get("dependencies", []) - deps = filter_requirements_with_extras(cast(str, candidate.req.name), deps, candidate.req.extras or ()) + deps = filter_requirements_with_extras(deps, candidate.req.extras or ()) return deps, pypi_data.get("requires_python", ""), "" def dependency_generators(self) -> Iterable[Callable[[Candidate], CandidateInfo]]: diff --git a/src/pdm/resolver/core.py b/src/pdm/resolver/core.py index bde487cdc8..1e8022ad4d 100644 --- a/src/pdm/resolver/core.py +++ b/src/pdm/resolver/core.py @@ -6,6 +6,7 @@ from pdm.models.candidates import Candidate from pdm.models.repositories import BaseRepository from pdm.models.requirements import strip_extras +from pdm.resolver.graph import merge_markers from pdm.resolver.providers import BaseProvider from pdm.resolver.python import PythonRequirement from pdm.utils import normalize_name @@ -23,6 +24,7 @@ def resolve( requires_python: PySpecSet, max_rounds: int = 10000, keep_self: bool = False, + record_markers: bool = False, ) -> tuple[dict[str, Candidate], dict[tuple[str, str | None], list[Requirement]]]: """Core function to perform the actual resolve process. 
Return a tuple containing 2 items: @@ -48,10 +50,21 @@ def resolve( local_name = ( normalize_name(repository.environment.project.name) if repository.environment.project.is_library else None ) - for key, candidate in list(result.mapping.items()): + if record_markers: + all_markers = merge_markers(result) + else: + all_markers = {} + for key, candidate in list(mapping.items()): if key is None: continue + if key in all_markers: + marker = all_markers[key] + if marker.is_empty(): + del mapping[key] + continue + candidate.req.marker = None if marker.is_any() else marker + # For source distribution whose name can only be determined after it is built, # the key in the resolution map should be updated. if key.startswith(":empty:"): diff --git a/src/pdm/resolver/graph.py b/src/pdm/resolver/graph.py new file mode 100644 index 0000000000..6960957e37 --- /dev/null +++ b/src/pdm/resolver/graph.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, AbstractSet, Iterable, Iterator, TypeVar, overload + +from pdm.models.markers import Marker, get_marker + +if TYPE_CHECKING: + from resolvelib.resolvers import Criterion, Result + + from pdm.models.candidates import Candidate + from pdm.models.requirements import Requirement + +T = TypeVar("T") + + +class OrderedSet(AbstractSet[T]): + """Set with deterministic ordering.""" + + def __init__(self, iterable: Iterable[T] = ()) -> None: + self._data = list(dict.fromkeys(iterable)) + + def __hash__(self) -> int: + return self._hash() + + def __contains__(self, obj: object) -> bool: + return obj in self._data + + def __iter__(self) -> Iterator[T]: + return iter(self._data) + + def __len__(self) -> int: + return len(self._data) + + +@overload +def _identify_parent(parent: None) -> None: + ... + + +@overload +def _identify_parent(parent: Candidate) -> str: + ... + + +def _identify_parent(parent: Candidate | None) -> str | None: + return parent.identify() if parent else None + + +def merge_markers(result: Result[Requirement, Candidate, str]) -> dict[str, Marker]: + """Traverse through the parent dependencies till the top + and merge any requirement markers on the path. + Return a map of Metaset for each candidate. + """ + all_markers: dict[str, Marker] = {} + unresolved = OrderedSet(result.mapping) + circular: dict[str, OrderedSet[str]] = {} + + while unresolved: + new_markers: dict[str, Marker] = {} + for k in unresolved: + crit = result.criteria[k] + keep_unresolved = circular.get(k, OrderedSet()) + # All parents must be resolved first + if any(p and _identify_parent(p) in (unresolved - keep_unresolved) for p in crit.iter_parent()): + continue + new_markers[k] = _build_marker(crit, all_markers, keep_unresolved) + + if new_markers: + all_markers.update(new_markers) + unresolved -= new_markers # type: ignore[assignment,operator] + else: + # No progress, there are likely circular dependencies. + # Pick one package and keep its parents unresolved now, we will get into it + # after all others are resolved. 
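+            # The marker built for that package in the meantime ignores the deferred
+            # parents; it is rebuilt with the full parent set in the final pass over
+            # `circular` below.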
+ package = next((p for p in unresolved if p not in circular), None) + if not package: + break + crit = result.criteria[package] + unresolved_parents = OrderedSet( + filter( + lambda p: p in unresolved and p != package, + (_identify_parent(p) for p in crit.iter_parent() if p), + ) + ) + circular[package] = unresolved_parents + + for key in circular: + crit = result.criteria[key] + all_markers[key] = _build_marker(crit, all_markers, set()) + + return all_markers + + +def _build_marker( + crit: Criterion[Requirement, Candidate, str], resolved: dict[str, Marker], keep_unresolved: AbstractSet[str] +) -> Marker: + marker = None + + for r, parent in crit.information: + if parent and _identify_parent(parent) in keep_unresolved: + continue + this_marker = r.marker if r.marker is not None else get_marker("") + # Use 'and' to connect markers inherited from parent. + if not parent: + parent_marker = get_marker("") + else: + parent_marker = resolved[_identify_parent(parent)] + merged = this_marker & parent_marker + # Use 'or' to connect metasets inherited from different parents. + marker = marker | merged if marker is not None else merged + return marker if marker is not None else get_marker("") diff --git a/tests/fixtures/projects/demo-package-has-dep-with-extras/requirements.txt b/tests/fixtures/projects/demo-package-has-dep-with-extras/requirements.txt index 6ab47db93a..b77256785c 100644 --- a/tests/fixtures/projects/demo-package-has-dep-with-extras/requirements.txt +++ b/tests/fixtures/projects/demo-package-has-dep-with-extras/requirements.txt @@ -2,7 +2,7 @@ # Please do not edit it manually. certifi==2021.10.8 -charset-normalizer==2.0.7 -idna==3.3 +charset-normalizer==2.0.7; python_version >= "3" +idna==3.3; python_version >= "3" requests==2.26.0 urllib3==1.26.7 diff --git a/tests/models/test_candidates.py b/tests/models/test_candidates.py index 6f9fe95030..e9f1d72687 100644 --- a/tests/models/test_candidates.py +++ b/tests/models/test_candidates.py @@ -4,7 +4,6 @@ from unearth import Link from pdm._types import RepositoryConfig -from pdm.exceptions import ExtrasWarning from pdm.models.candidates import Candidate from pdm.models.requirements import parse_requirement from pdm.utils import path_to_url @@ -90,19 +89,6 @@ def test_parse_remote_link_metadata(project): assert candidate.version == "0.0.1" -@pytest.mark.usefixtures("local_finder") -def test_extras_warning(project, recwarn): - req = parse_requirement("demo[foo] @ http://fixtures.test/artifacts/demo-0.0.1-py2.py3-none-any.whl") - candidate = Candidate(req) - prepared = candidate.prepare(project.environment) - assert prepared.link.is_wheel - assert prepared.get_dependencies_from_metadata() == [] - warning = recwarn.pop(ExtrasWarning) - assert str(warning.message) == "Extras not found for demo: [foo]" - assert candidate.name == "demo" - assert candidate.version == "0.0.1" - - @pytest.mark.xfail(reason="packaging 22 no longer supports legacy specifiers") @pytest.mark.usefixtures("local_finder") def test_parse_abnormal_specifiers(project): diff --git a/tests/models/test_marker.py b/tests/models/test_marker.py index 7250f12fff..cecaa85207 100644 --- a/tests/models/test_marker.py +++ b/tests/models/test_marker.py @@ -1,48 +1,18 @@ import pytest -from pdm.models.markers import Marker, split_marker_extras - - -@pytest.mark.parametrize( - "original,extras,rest", - [ - ("extra == 'foo'", {"foo"}, ""), - ("extra != 'foo'", set(), 'extra != "foo"'), - ("extra == 'foo' or extra == 'bar'", {"foo", "bar"}, ""), - ("os_name == 'nt'", set(), 'os_name == 
"nt"'), - ("extra in 'foo,bar'", {"foo", "bar"}, ""), - ( - "os_name == 'nt' and (extra == 'foo' or extra == 'bar')", - {"foo", "bar"}, - 'os_name == "nt"', - ), - ( - 'extra == "foo" and extra == "bar"', - set(), - 'extra == "foo" and extra == "bar"', - ), - ( - "os_name == 'nt' and (extra == 'foo' or sys_platform == 'Windows')", - set(), - 'os_name == "nt" and (extra == "foo" or sys_platform == "Windows")', - ), - ], -) -def test_split_marker_extras(original, extras, rest): - result = split_marker_extras(original) - assert result == (extras, rest) +from pdm.models.markers import get_marker @pytest.mark.parametrize( "original,marker,py_spec", [ - ("python_version > '3'", None, ">=3.1"), - ("python_version > '3.8'", None, ">=3.9"), - ("python_version != '3.8'", None, "!=3.8.*"), - ("python_version == '3.7'", None, ">=3.7,<3.8"), - ("python_version in '3.6 3.7'", None, ">=3.6,<3.8"), - ("python_full_version >= '3.6.0'", None, ">=3.6"), - ("python_full_version not in '3.8.3'", None, "!=3.8.3"), + ("python_version > '3'", "", ">=3.1"), + ("python_version > '3.8'", "", ">=3.9"), + ("python_version != '3.8'", "", "!=3.8.*"), + ("python_version == '3.7'", "", ">=3.7,<3.8"), + ("python_version in '3.6 3.7'", "", ">=3.6,<3.8"), + ("python_full_version >= '3.6.0'", "", ">=3.6"), + ("python_full_version not in '3.8.3'", "", "!=3.8.3"), # mixed marker and python version ("python_version > '3.7' and os_name == 'nt'", 'os_name == "nt"', ">=3.8"), ( @@ -53,7 +23,7 @@ def test_split_marker_extras(original, extras, rest): ], ) def test_split_pyspec(original, marker, py_spec): - m = Marker(original) + m = get_marker(original) a, b = m.split_pyspec() - assert marker == (str(a) if a is not None else None) + assert marker == str(a) assert py_spec == str(b) diff --git a/tests/models/test_requirements.py b/tests/models/test_requirements.py index 687a4ca236..ea2eb4ac94 100644 --- a/tests/models/test_requirements.py +++ b/tests/models/test_requirements.py @@ -2,7 +2,7 @@ import pytest -from pdm.models.requirements import RequirementError, parse_requirement +from pdm.models.requirements import RequirementError, filter_requirements_with_extras, parse_requirement from pdm.utils import PACKAGING_22, path_to_url from tests import FIXTURES @@ -94,3 +94,33 @@ def test_illegal_requirement_line(line, expected): def test_not_supported_editable_requirement(line): with pytest.raises(RequirementError, match="Editable requirement is only supported"): parse_requirement(line, True) + + +def test_filter_requirements_with_extras(): + requirements = [ + "foo; extra == 'a'", + "bar; extra == 'b'", + "baz; extra == 'a' or extra == 'b'", + "qux; extra == 'a' and extra == 'b'", + "ping; os_name == 'nt' and extra == 'a'", + "blah", + ] + assert filter_requirements_with_extras(requirements, ()) == ["blah"] + assert filter_requirements_with_extras(requirements, ("a",)) == ["foo", "baz", 'ping; os_name == "nt"'] + assert filter_requirements_with_extras(requirements, ("b",)) == ["bar", "baz"] + assert filter_requirements_with_extras(requirements, ("a", "b")) == [ + "foo", + "bar", + "baz", + "qux", + 'ping; os_name == "nt"', + ] + assert filter_requirements_with_extras(requirements, ("c",)) == [] + assert filter_requirements_with_extras(requirements, ("a", "b"), include_default=True) == [ + "foo", + "bar", + "baz", + "qux", + 'ping; os_name == "nt"', + "blah", + ] diff --git a/tests/resolver/test_resolve.py b/tests/resolver/test_resolve.py index 0a6a433190..a9118e9953 100644 --- a/tests/resolver/test_resolve.py +++ 
b/tests/resolver/test_resolve.py @@ -19,6 +19,7 @@ def resolve_func( strategy="all", tracked_names=None, direct_minimal_versions=False, + record_markers=False, ): repository.environment.python_requires = PySpecSet(requires_python) if allow_prereleases is not None: @@ -35,7 +36,9 @@ def resolve_func( with ui.open_spinner("Resolving dependencies") as spin, ui.logging("lock"): reporter = SpinnerReporter(spin, requirements) resolver = Resolver(provider, reporter) - mapping, *_ = _resolve(resolver, requirements, repository.environment.python_requires) + mapping, *_ = _resolve( + resolver, requirements, repository.environment.python_requires, record_markers=record_markers + ) return mapping return resolve_func @@ -339,3 +342,37 @@ def test_resolve_direct_minimal_versions(resolve, repository, project): result = resolve(["django"], ">=3.6", direct_minimal_versions=True) assert result["django"].version == "1.11.8" assert result["pytz"].version == "2019.6" + + +def test_resolve_record_markers(resolve, repository, project): + repository.add_candidate("A", "1.0") + repository.add_candidate("B", "1.0") + repository.add_candidate("C", "1.0") + repository.add_candidate("D", "1.0") + repository.add_candidate("E", "1.0") + repository.add_candidate("F", "1.0") + repository.add_dependencies("A", "1.0", ["B; os_name == 'posix'", "C; os_name=='nt'"]) + # package D has transitive markers that conflict + repository.add_dependencies("C", "1.0", ["D; os_name!='nt'", "E; python_version < '3.8'"]) + # package E has union markers + repository.add_dependencies("B", "1.0", ["E; python_version >= '3.7'"]) + # B -> E -> F -> B has circular dependency + repository.add_dependencies("E", "1.0", ["F; platform_machine=='x86_64'"]) + repository.add_dependencies("F", "1.0", ["B"]) + + result = resolve(["A"], ">=3.6", record_markers=True) + assert result["a"].version == "1.0" + assert "d" not in result + assert ( + str(result["e"].req.marker) + == 'python_version >= "3.7" and os_name == "posix" or python_version < "3.8" and os_name == "nt"' + ) + assert ( + str(result["f"].req.marker) + == 'python_version >= "3.7" and os_name == "posix" and platform_machine == "x86_64" or ' + 'python_version < "3.8" and os_name == "nt" and platform_machine == "x86_64"' + ) + assert ( + str(result["b"].req.marker) == 'os_name == "posix" or (os_name == "posix" or os_name == "nt") and ' + 'platform_machine == "x86_64" and python_version < "3.8"' + ) diff --git a/tests/test_formats.py b/tests/test_formats.py index b5deba0cc8..6eba8576c8 100644 --- a/tests/test_formats.py +++ b/tests/test_formats.py @@ -71,10 +71,7 @@ def test_convert_poetry(project): assert "repository" in result["urls"] assert result["requires-python"] == ">=2.7,<4.0,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" assert 'cleo<1.0.0,>=0.7.6; python_version ~= "2.7"' in result["dependencies"] - assert ( - 'cachecontrol[filecache]<1.0.0,>=0.12.4; python_version >= "3.4" ' - 'and python_version < "4.0"' in result["dependencies"] - ) + assert 'cachecontrol[filecache]<1.0.0,>=0.12.4; python_version ~= "3.4"' in result["dependencies"] assert "babel==2.9.0" in result["dependencies"] assert "mysql" in result["optional-dependencies"] assert "psycopg2<3.0,>=2.7" in result["optional-dependencies"]["pgsql"]