diff --git a/pex/build_system/__init__.py b/pex/build_system/__init__.py index 254ca6d60..029046b60 100644 --- a/pex/build_system/__init__.py +++ b/pex/build_system/__init__.py @@ -3,6 +3,32 @@ from __future__ import absolute_import +import json +import os +import subprocess +from textwrap import dedent + +from pex.common import REPRODUCIBLE_BUILDS_ENV, CopyMode, safe_mkdtemp +from pex.dist_metadata import Distribution +from pex.interpreter import PythonInterpreter +from pex.jobs import Job, SpawnedJob +from pex.pex import PEX +from pex.pex_bootstrapper import VenvPex, ensure_venv +from pex.pex_builder import PEXBuilder +from pex.result import Error +from pex.typing import TYPE_CHECKING +from pex.variables import ENV +from pex.venv.bin_path import BinPath +from pex.venv.virtualenv import Virtualenv + +if TYPE_CHECKING: + from typing import Any, Iterable, Mapping, Optional, Tuple, Union + + import attr # vendor:skip +else: + from pex.third_party import attr + + # The split of PEP-517 / PEP-518 is quite awkward. PEP-518 doesn't really work without also # specifying a build backend or knowing a default value for one, but the concept is not defined # until PEP-517. As such, we break this historical? strange division and define the default outside @@ -10,3 +36,174 @@ # # See: https://peps.python.org/pep-0517/#source-trees DEFAULT_BUILD_BACKEND = "setuptools.build_meta:__legacy__" +DEFAULT_BUILD_REQUIRES = ("setuptools",) + + +@attr.s(frozen=True) +class BuildSystemTable(object): + requires = attr.ib() # type: Tuple[str, ...] + build_backend = attr.ib(default=DEFAULT_BUILD_BACKEND) # type: str + backend_path = attr.ib(default=()) # type: Tuple[str, ...] + + +DEFAULT_BUILD_SYSTEM_TABLE = BuildSystemTable( + requires=DEFAULT_BUILD_REQUIRES, build_backend=DEFAULT_BUILD_BACKEND +) + + +# Exit code 75 is EX_TEMPFAIL defined in /usr/include/sysexits.h +# this seems an appropriate signal of DNE vs execute and fail. 
+_HOOK_UNAVAILABLE_EXIT_CODE = 75 + + +@attr.s(frozen=True) +class BuildSystem(object): + @classmethod + def create( + cls, + interpreter, # type: PythonInterpreter + requires, # type: Iterable[str] + resolved, # type: Iterable[Distribution] + build_backend, # type: str + backend_path, # type: Tuple[str, ...] + extra_requirements=None, # type: Optional[Iterable[str]] + use_system_time=False, # type: bool + **extra_env # type: str + ): + # type: (...) -> Union[BuildSystem, Error] + pex_builder = PEXBuilder(copy_mode=CopyMode.SYMLINK) + pex_builder.info.venv = True + pex_builder.info.venv_site_packages_copies = True + pex_builder.info.venv_bin_path = BinPath.PREPEND + # Allow REPRODUCIBLE_BUILDS_ENV PYTHONHASHSEED env var to take effect. + pex_builder.info.venv_hermetic_scripts = False + for req in requires: + pex_builder.add_requirement(req) + for dist in resolved: + pex_builder.add_distribution(dist) + pex_builder.freeze(bytecode_compile=False) + venv_pex = ensure_venv(PEX(pex_builder.path(), interpreter=interpreter)) + if extra_requirements: + # N.B.: We install extra requirements separately instead of having them resolved and + # handed in with the `resolved` above because there are cases in the wild where the + # build system requires (PEP-518) and the results of PEP-517 `get_requires_for_*` can + # return overlapping requirements. Pip will error for overlaps complaining of duplicate + # requirements if we attempt to resolve all the requirements at once; so we instead + # resolve and install in two phases. This obviously has problems! That said, it is, in + # fact, how Pip's internal PEP-517 build frontend works; so we emulate that. + virtualenv = Virtualenv(venv_pex.venv_dir) + # Python 3.5 comes with Pip 9.0.1 which is pretty broken: it doesn't work with our test + # cases; so we upgrade. 
+ # For Python 2.7 we use virtualenv (there is no -m venv built into Python) and that + # comes with Pip 22.0.2, Python 3.6 comes with Pip 18.1 and Python 3.7 comes with + # Pip 22.04 and the default Pips only get newer with newer version of Pythons. These all + # work well enough for our test cases and, in general, they should work well enough with + # the Python they come paired with. + upgrade_pip = virtualenv.interpreter.version[:2] == (3, 5) + virtualenv.ensure_pip(upgrade=upgrade_pip) + with open(os.devnull, "wb") as dev_null: + _, process = virtualenv.interpreter.open_process( + args=[ + "-m", + "pip", + "install", + "--ignore-installed", + "--no-user", + "--no-warn-script-location", + ] + + list(extra_requirements), + stdout=dev_null, + stderr=subprocess.PIPE, + ) + _, stderr = process.communicate() + if process.returncode != 0: + return Error( + "Failed to install extra requirement in venv at {venv_dir}: " + "{extra_requirements}\nSTDERR:\n{stderr}".format( + venv_dir=venv_pex.venv_dir, + extra_requirements=", ".join(extra_requirements), + stderr=stderr.decode("utf-8"), + ) + ) + + # Ensure all PEX* env vars are stripped except for PEX_ROOT and PEX_VERBOSE. We want folks + # to be able to steer the location of the cache and the logging verbosity, but nothing else. + # We control the entry-point, etc. of the PEP-518 build backend venv for internal use. + with ENV.strip().patch(PEX_ROOT=ENV.PEX_ROOT, PEX_VERBOSE=str(ENV.PEX_VERBOSE)) as env: + if extra_env: + env.update(extra_env) + if backend_path: + env.update(PEX_EXTRA_SYS_PATH=os.pathsep.join(backend_path)) + if not use_system_time: + env.update(REPRODUCIBLE_BUILDS_ENV) + return cls( + venv_pex=venv_pex, build_backend=build_backend, requires=tuple(requires), env=env + ) + + venv_pex = attr.ib() # type: VenvPex + build_backend = attr.ib() # type: str + requires = attr.ib() # type: Tuple[str, ...] 
+ env = attr.ib() # type: Mapping[str, str] + + def invoke_build_hook( + self, + project_directory, # type: str + hook_method, # type: str + hook_args=(), # type: Iterable[Any] + hook_kwargs=None, # type: Optional[Mapping[str, Any]] + ): + # type: (...) -> Union[SpawnedJob[Any], Error] + + # The interfaces are spec'd here: https://peps.python.org/pep-0517 + build_backend_module, _, _ = self.build_backend.partition(":") + build_backend_object = self.build_backend.replace(":", ".") + build_hook_result = os.path.join( + safe_mkdtemp(prefix="pex-pep-517."), "build_hook_result.json" + ) + args = self.venv_pex.execute_args( + additional_args=( + "-c", + dedent( + """\ + import json + import sys + + import {build_backend_module} + + + if not hasattr({build_backend_object}, {hook_method!r}): + sys.exit({hook_unavailable_exit_code}) + + result = {build_backend_object}.{hook_method}(*{hook_args!r}, **{hook_kwargs!r}) + with open({result_file!r}, "w") as fp: + json.dump(result, fp) + """ + ).format( + build_backend_module=build_backend_module, + build_backend_object=build_backend_object, + hook_method=hook_method, + hook_args=tuple(hook_args), + hook_kwargs=dict(hook_kwargs) if hook_kwargs else {}, + hook_unavailable_exit_code=_HOOK_UNAVAILABLE_EXIT_CODE, + result_file=build_hook_result, + ), + ) + ) + process = subprocess.Popen( + args=args, + env=self.env, + cwd=project_directory, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + return SpawnedJob.file( + Job( + command=args, + process=process, + context="PEP-517:{hook_method} at {project_directory}".format( + hook_method=hook_method, project_directory=project_directory + ), + ), + output_file=build_hook_result, + result_func=lambda file_content: json.loads(file_content.decode("utf-8")), + ) diff --git a/pex/build_system/pep_517.py b/pex/build_system/pep_517.py index 5f1573813..99ce8195b 100644 --- a/pex/build_system/pep_517.py +++ b/pex/build_system/pep_517.py @@ -3,14 +3,11 @@ from __future__ import 
absolute_import -import json import os -import subprocess -from textwrap import dedent from pex import third_party -from pex.build_system import DEFAULT_BUILD_BACKEND -from pex.build_system.pep_518 import BuildSystem, load_build_system +from pex.build_system import DEFAULT_BUILD_BACKEND, BuildSystem +from pex.build_system.pep_518 import load_build_system from pex.common import safe_mkdtemp from pex.dist_metadata import DistMetadata, Distribution, MetadataType from pex.jobs import Job, SpawnedJob @@ -22,7 +19,7 @@ from pex.typing import TYPE_CHECKING, cast if TYPE_CHECKING: - from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Text, Union + from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Text, Tuple, Union _DEFAULT_BUILD_SYSTEMS = {} # type: Dict[PipVersionValue, BuildSystem] @@ -134,67 +131,21 @@ def _invoke_build_hook( ) ) - build_system_or_error = _get_build_system( + result = _get_build_system( target, resolver, project_directory, extra_requirements=hook_extra_requirements, pip_version=pip_version, ) - if isinstance(build_system_or_error, Error): - return build_system_or_error - build_system = build_system_or_error - - # The interfaces are spec'd here: https://peps.python.org/pep-0517 - build_backend_module, _, _ = build_system.build_backend.partition(":") - build_backend_object = build_system.build_backend.replace(":", ".") - build_hook_result = os.path.join(safe_mkdtemp(prefix="pex-pep-517."), "build_hook_result.json") - args = build_system.venv_pex.execute_args( - additional_args=( - "-c", - dedent( - """\ - import json - import sys - - import {build_backend_module} - - - if not hasattr({build_backend_object}, {hook_method!r}): - sys.exit({hook_unavailable_exit_code}) - - result = {build_backend_object}.{hook_method}(*{hook_args!r}, **{hook_kwargs!r}) - with open({result_file!r}, "w") as fp: - json.dump(result, fp) - """ - ).format( - build_backend_module=build_backend_module, - 
build_backend_object=build_backend_object, - hook_method=hook_method, - hook_args=tuple(hook_args), - hook_kwargs=dict(hook_kwargs) if hook_kwargs else {}, - hook_unavailable_exit_code=_HOOK_UNAVAILABLE_EXIT_CODE, - result_file=build_hook_result, - ), - ) - ) - process = subprocess.Popen( - args=args, - env=build_system.env, - cwd=project_directory, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - return SpawnedJob.file( - Job( - command=args, - process=process, - context="PEP-517:{hook_method} at {project_directory}".format( - hook_method=hook_method, project_directory=project_directory - ), - ), - output_file=build_hook_result, - result_func=lambda file_content: json.loads(file_content.decode("utf-8")), + if isinstance(result, Error): + return result + + return result.invoke_build_hook( + project_directory=project_directory, + hook_method=hook_method, + hook_args=hook_args, + hook_kwargs=hook_kwargs, ) @@ -248,15 +199,14 @@ def build_sdist( return os.path.join(dist_dir, sdist_relpath) -def spawn_prepare_metadata( +def get_requires_for_build_wheel( project_directory, # type: str target, # type: Target resolver, # type: Resolver pip_version=None, # type: Optional[PipVersionValue] ): - # type: (...) -> SpawnedJob[DistMetadata] + # type: (...) -> Tuple[str, ...] - extra_requirements = [] spawned_job = try_( _invoke_build_hook( project_directory, @@ -267,11 +217,24 @@ def spawn_prepare_metadata( ) ) try: - extra_requirements.extend(spawned_job.await_result()) + return tuple(spawned_job.await_result()) except Job.Error as e: if e.exitcode != _HOOK_UNAVAILABLE_EXIT_CODE: raise e + return () + +def spawn_prepare_metadata( + project_directory, # type: str + target, # type: Target + resolver, # type: Resolver + pip_version=None, # type: Optional[PipVersionValue] +): + # type: (...) 
-> SpawnedJob[DistMetadata] + + extra_requirements = get_requires_for_build_wheel( + project_directory, target, resolver, pip_version=pip_version + ) build_dir = os.path.join(safe_mkdtemp(), "build") os.mkdir(build_dir) spawned_job = try_( diff --git a/pex/build_system/pep_518.py b/pex/build_system/pep_518.py index 09387dfce..59a5add8c 100644 --- a/pex/build_system/pep_518.py +++ b/pex/build_system/pep_518.py @@ -4,38 +4,22 @@ from __future__ import absolute_import import os.path -import subprocess from pex import toml -from pex.build_system import DEFAULT_BUILD_BACKEND -from pex.common import REPRODUCIBLE_BUILDS_ENV, CopyMode -from pex.dist_metadata import Distribution -from pex.interpreter import PythonInterpreter -from pex.pex import PEX -from pex.pex_bootstrapper import VenvPex, ensure_venv -from pex.pex_builder import PEXBuilder +from pex.build_system import ( + DEFAULT_BUILD_BACKEND, + DEFAULT_BUILD_SYSTEM_TABLE, + BuildSystem, + BuildSystemTable, +) from pex.resolve.resolvers import Resolver from pex.result import Error from pex.targets import LocalInterpreter, Target, Targets from pex.tracer import TRACER from pex.typing import TYPE_CHECKING -from pex.variables import ENV -from pex.venv.bin_path import BinPath -from pex.venv.virtualenv import Virtualenv if TYPE_CHECKING: - from typing import Iterable, Mapping, Optional, Tuple, Union - - import attr # vendor:skip -else: - from pex.third_party import attr - - -@attr.s(frozen=True) -class BuildSystemTable(object): - requires = attr.ib() # type: Tuple[str, ...] - build_backend = attr.ib(default=DEFAULT_BUILD_BACKEND) # type: str - backend_path = attr.ib(default=()) # type: Tuple[str, ...] 
+ from typing import Iterable, Optional, Union def _read_build_system_table( @@ -69,94 +53,23 @@ def _read_build_system_table( ) -@attr.s(frozen=True) -class BuildSystem(object): - @classmethod - def create( - cls, - interpreter, # type: PythonInterpreter - requires, # type: Iterable[str] - resolved, # type: Iterable[Distribution] - build_backend, # type: str - backend_path, # type: Tuple[str, ...] - extra_requirements=None, # type: Optional[Iterable[str]] - use_system_time=False, # type: bool - **extra_env # type: str - ): - # type: (...) -> Union[BuildSystem, Error] - pex_builder = PEXBuilder(copy_mode=CopyMode.SYMLINK) - pex_builder.info.venv = True - pex_builder.info.venv_site_packages_copies = True - pex_builder.info.venv_bin_path = BinPath.PREPEND - # Allow REPRODUCIBLE_BUILDS_ENV PYTHONHASHSEED env var to take effect. - pex_builder.info.venv_hermetic_scripts = False - for req in requires: - pex_builder.add_requirement(req) - for dist in resolved: - pex_builder.add_distribution(dist) - pex_builder.freeze(bytecode_compile=False) - venv_pex = ensure_venv(PEX(pex_builder.path(), interpreter=interpreter)) - if extra_requirements: - # N.B.: We install extra requirements separately instead of having them resolved and - # handed in with the `resolved` above because there are cases in the wild where the - # build system requires (PEP-518) and the results of PEP-517 `get_requires_for_*` can - # return overlapping requirements. Pip will error for overlaps complaining of duplicate - # requirements if we attempt to resolve all the requirements at once; so we instead - # resolve and install in two phases. This obviously has problems! That said, it is, in - # fact, how Pip's internal PEP-517 build frontend works; so we emulate that. - virtualenv = Virtualenv(venv_pex.venv_dir) - # Python 3.5 comes with Pip 9.0.1 which is pretty broken: it doesn't work with our test - # cases; so we upgrade. 
- # For Python 2.7 we use virtualenv (there is no -m venv built into Python) and that - # comes with Pip 22.0.2, Python 3.6 comes with Pip 18.1 and Python 3.7 comes with - # Pip 22.04 and the default Pips only get newer with newer version of Pythons. These all - # work well enough for our test cases and, in general, they should work well enough with - # the Python they come paired with. - upgrade_pip = virtualenv.interpreter.version[:2] == (3, 5) - virtualenv.ensure_pip(upgrade=upgrade_pip) - with open(os.devnull, "wb") as dev_null: - _, process = virtualenv.interpreter.open_process( - args=[ - "-m", - "pip", - "install", - "--ignore-installed", - "--no-user", - "--no-warn-script-location", - ] - + list(extra_requirements), - stdout=dev_null, - stderr=subprocess.PIPE, - ) - _, stderr = process.communicate() - if process.returncode != 0: - return Error( - "Failed to install extra requirement in venv at {venv_dir}: " - "{extra_requirements}\nSTDERR:\n{stderr}".format( - venv_dir=venv_pex.venv_dir, - extra_requirements=", ".join(extra_requirements), - stderr=stderr.decode("utf-8"), - ) - ) - - # Ensure all PEX* env vars are stripped except for PEX_ROOT and PEX_VERBOSE. We want folks - # to be able to steer the location of the cache and the logging verbosity, but nothing else. - # We control the entry-point, etc. of the PEP-518 build backend venv for internal use. 
- with ENV.strip().patch(PEX_ROOT=ENV.PEX_ROOT, PEX_VERBOSE=str(ENV.PEX_VERBOSE)) as env: - if extra_env: - env.update(extra_env) - if backend_path: - env.update(PEX_EXTRA_SYS_PATH=os.pathsep.join(backend_path)) - if not use_system_time: - env.update(REPRODUCIBLE_BUILDS_ENV) - return cls( - venv_pex=venv_pex, build_backend=build_backend, requires=tuple(requires), env=env - ) +def _maybe_load_build_system_table(project_directory): + # type: (str) -> Union[Optional[BuildSystemTable], Error] + + # The interface is spec'd here: https://peps.python.org/pep-0518/ + pyproject_toml = os.path.join(project_directory, "pyproject.toml") + if not os.path.isfile(pyproject_toml): + return None + return _read_build_system_table(pyproject_toml) + - venv_pex = attr.ib() # type: VenvPex - build_backend = attr.ib() # type: str - requires = attr.ib() # type: Tuple[str, ...] - env = attr.ib() # type: Mapping[str, str] +def load_build_system_table(project_directory): + # type: (str) -> Union[BuildSystemTable, Error] + + maybe_build_system_table_or_error = _maybe_load_build_system_table(project_directory) + if maybe_build_system_table_or_error is not None: + return maybe_build_system_table_or_error + return DEFAULT_BUILD_SYSTEM_TABLE def load_build_system( @@ -167,12 +80,7 @@ def load_build_system( ): # type: (...) 
-> Union[Optional[BuildSystem], Error] - # The interface is spec'd here: https://peps.python.org/pep-0518/ - pyproject_toml = os.path.join(project_directory, "pyproject.toml") - if not os.path.isfile(pyproject_toml): - return None - - maybe_build_system_table_or_error = _read_build_system_table(pyproject_toml) + maybe_build_system_table_or_error = _maybe_load_build_system_table(project_directory) if not isinstance(maybe_build_system_table_or_error, BuildSystemTable): return maybe_build_system_table_or_error build_system_table = maybe_build_system_table_or_error diff --git a/pex/cli/commands/lock.py b/pex/cli/commands/lock.py index 942de3eac..0937179be 100644 --- a/pex/cli/commands/lock.py +++ b/pex/cli/commands/lock.py @@ -542,6 +542,20 @@ def add_create_lock_options(cls, create_parser): "extras deps that are never activated, but may trim more in the future." ), ) + create_parser.add_argument( + "--lock-build-systems", + "--no-lock-build-systems", + dest="lock_build_systems", + default=False, + action=HandleBoolAction, + type=bool, + help=( + "When creating a lock that includes sdists, VCS requirements or local project " + "directories that will later need to be built into wheels when using the lock, " + "also lock the build system for each of these source tree artifacts to ensure " + "consistent build environments at future times." 
+ ), + ) cls._add_lock_options(create_parser) cls._add_resolve_options(create_parser) cls.add_json_options(create_parser, entity="lock", include_switch=False) @@ -818,6 +832,33 @@ def add_extra_arguments( ) as sync_parser: cls._add_sync_arguments(sync_parser) + def _get_lock_configuration(self, target_configuration): + # type: (TargetConfiguration) -> Union[LockConfiguration, Error] + if self.options.style is LockStyle.UNIVERSAL: + return LockConfiguration( + style=LockStyle.UNIVERSAL, + requires_python=tuple( + str(interpreter_constraint.requires_python) + for interpreter_constraint in target_configuration.interpreter_constraints + ), + target_systems=tuple(self.options.target_systems), + elide_unused_requires_dist=self.options.elide_unused_requires_dist, + lock_build_systems=self.options.lock_build_systems, + ) + + if self.options.target_systems: + return Error( + "The --target-system option only applies to --style {universal} locks.".format( + universal=LockStyle.UNIVERSAL.value + ) + ) + + return LockConfiguration( + style=self.options.style, + elide_unused_requires_dist=self.options.elide_unused_requires_dist, + lock_build_systems=self.options.lock_build_systems, + ) + def _resolve_targets( self, action, # type: str @@ -907,28 +948,7 @@ def _create(self): target_configuration = target_options.configure( self.options, pip_configuration=pip_configuration ) - if self.options.style == LockStyle.UNIVERSAL: - lock_configuration = LockConfiguration( - style=LockStyle.UNIVERSAL, - requires_python=tuple( - str(interpreter_constraint.requires_python) - for interpreter_constraint in target_configuration.interpreter_constraints - ), - target_systems=tuple(self.options.target_systems), - elide_unused_requires_dist=self.options.elide_unused_requires_dist, - ) - elif self.options.target_systems: - return Error( - "The --target-system option only applies to --style {universal} locks.".format( - universal=LockStyle.UNIVERSAL.value - ) - ) - else: - lock_configuration = 
LockConfiguration( - style=self.options.style, - elide_unused_requires_dist=self.options.elide_unused_requires_dist, - ) - + lock_configuration = try_(self._get_lock_configuration(target_configuration)) targets = try_( self._resolve_targets( action="creating", @@ -1491,8 +1511,8 @@ def process_req_edits( lock_file=attr.evolve( lock_file, pex_version=__version__, - requirements=SortedTuple(requirements_by_project_name.values(), key=str), - constraints=SortedTuple(constraints_by_project_name.values(), key=str), + requirements=SortedTuple(requirements_by_project_name.values()), + constraints=SortedTuple(constraints_by_project_name.values()), locked_resolves=SortedTuple( resolve_update.updated_resolve for resolve_update in lock_update.resolves ), @@ -1576,28 +1596,7 @@ def _sync(self): target_configuration = target_options.configure( self.options, pip_configuration=pip_configuration ) - if self.options.style == LockStyle.UNIVERSAL: - lock_configuration = LockConfiguration( - style=LockStyle.UNIVERSAL, - requires_python=tuple( - str(interpreter_constraint.requires_python) - for interpreter_constraint in target_configuration.interpreter_constraints - ), - target_systems=tuple(self.options.target_systems), - elide_unused_requires_dist=self.options.elide_unused_requires_dist, - ) - elif self.options.target_systems: - return Error( - "The --target-system option only applies to --style {universal} locks.".format( - universal=LockStyle.UNIVERSAL.value - ) - ) - else: - lock_configuration = LockConfiguration( - style=self.options.style, - elide_unused_requires_dist=self.options.elide_unused_requires_dist, - ) - + lock_configuration = try_(self._get_lock_configuration(target_configuration)) lock_file_path = self.options.lock if os.path.exists(lock_file_path): build_configuration = pip_configuration.build_configuration diff --git a/pex/dist_metadata.py b/pex/dist_metadata.py index e1dc2ea76..c445015d2 100644 --- a/pex/dist_metadata.py +++ b/pex/dist_metadata.py @@ -730,7 +730,8 
@@ def __str__(self): ) -@attr.s(frozen=True) +@functools.total_ordering +@attr.s(frozen=True, order=False) class Constraint(object): @classmethod def parse( @@ -849,8 +850,14 @@ def as_requirement(self): # type: () -> Requirement return Requirement(name=self.name, specifier=self.specifier, marker=self.marker) + def __lt__(self, other): + # type: (Any) -> bool + if not isinstance(other, Constraint): + return NotImplemented + return self._str < other._str -@attr.s(frozen=True) + +@attr.s(frozen=True, order=False) class Requirement(Constraint): @classmethod def parse( @@ -899,6 +906,12 @@ def as_constraint(self): # type: () -> Constraint return Constraint(name=self.name, specifier=self.specifier, marker=self.marker) + def __lt__(self, other): + # type: (Any) -> bool + if not isinstance(other, Requirement): + return NotImplemented + return self._str < other._str + # N.B.: DistributionMetadata can have an expensive hash when a distribution has many requirements; # so we cache the hash. See: https://github.com/pex-tool/pex/issues/1928 diff --git a/pex/pip/vcs.py b/pex/pip/vcs.py index e954590f3..89ac1cab8 100644 --- a/pex/pip/vcs.py +++ b/pex/pip/vcs.py @@ -7,6 +7,8 @@ import re from pex import hashing +from pex.build_system import BuildSystemTable +from pex.build_system.pep_518 import load_build_system_table from pex.common import is_pyc_dir, is_pyc_file, open_zip, temporary_dir from pex.hashing import Sha256 from pex.pep_440 import Version @@ -61,7 +63,7 @@ def fingerprint_downloaded_vcs_archive( version, # type: str vcs, # type: VCS.Value ): - # type: (...) -> Tuple[Fingerprint, str] + # type: (...) 
-> Tuple[Fingerprint, BuildSystemTable, str] archive_path = try_( _find_built_source_dist( @@ -69,8 +71,8 @@ def fingerprint_downloaded_vcs_archive( ) ) digest = Sha256() - digest_vcs_archive(archive_path=archive_path, vcs=vcs, digest=digest) - return Fingerprint.from_digest(digest), archive_path + build_system_table = digest_vcs_archive(archive_path=archive_path, vcs=vcs, digest=digest) + return Fingerprint.from_digest(digest), build_system_table, archive_path def digest_vcs_archive( @@ -78,7 +80,7 @@ def digest_vcs_archive( vcs, # type: VCS.Value digest, # type: HintedDigest ): - # type: (...) -> None + # type: (...) -> BuildSystemTable # All VCS requirements are prepared as zip archives as encoded in: # `pip._internal.req.req_install.InstallRequirement.archive`. @@ -109,3 +111,5 @@ def digest_vcs_archive( ), file_filter=lambda f: not is_pyc_file(f), ) + + return try_(load_build_system_table(chroot)) diff --git a/pex/resolve/build_systems.py b/pex/resolve/build_systems.py new file mode 100644 index 000000000..78f96be8d --- /dev/null +++ b/pex/resolve/build_systems.py @@ -0,0 +1,117 @@ +# Copyright 2024 Pex project contributors. +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ +from __future__ import absolute_import + +import os.path +import tarfile +from collections import OrderedDict + +from pex.build_system import DEFAULT_BUILD_SYSTEM_TABLE, BuildSystemTable +from pex.build_system.pep_518 import load_build_system_table +from pex.common import open_zip, safe_mkdtemp +from pex.dist_metadata import is_sdist, is_tar_sdist, is_zip_sdist +from pex.exceptions import production_assert, reportable_unexpected_error_msg +from pex.jobs import iter_map_parallel +from pex.resolve.resolved_requirement import PartialArtifact +from pex.resolve.resolvers import Resolver +from pex.result import try_ +from pex.typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: + from typing import Iterable, Iterator, Optional, Tuple + + import attr # vendor:skip +else: + from pex.third_party import attr + + +def extract_build_system_table(source_archive_path): + # type: (str) -> BuildSystemTable + + if is_tar_sdist(source_archive_path): + extract_chroot = safe_mkdtemp() + with tarfile.open(source_archive_path) as fp: + fp.extractall(extract_chroot) + elif is_zip_sdist(source_archive_path): + extract_chroot = safe_mkdtemp() + with open_zip(source_archive_path) as fp: + fp.extractall(extract_chroot) + else: + raise AssertionError( + reportable_unexpected_error_msg( + "Asked to extract a build system table from {path} which does not appear to be a " + "source archive.".format(path=source_archive_path) + ) + ) + + # We might get a Python-standard sdist, in which case the project root is at + # `-/` at the top of the archive, but we also might get some other sort of + # archive, like a GitHub source archive which does not use Python conventions. As such we just + # perform a top-down search for a project file and exit early for the highest-level such file + # found. + # TODO(John Sirois): XXX: Check if this works with VCS requirements that use Pip-proprietary + # subdirectory=YYY. 
+    for root, dirs, files in os.walk(extract_chroot):
+        if any(f in ("pyproject.toml", "setup.py", "setup.cfg") for f in files):
+            return try_(load_build_system_table(root))
+    return DEFAULT_BUILD_SYSTEM_TABLE
+
+
+@attr.s(frozen=True)
+class BuildSystems(object):
+    resolver = attr.ib()  # type: Resolver
+
+    def determine_build_systems(self, artifacts):
+        # type: (Iterable[PartialArtifact]) -> Iterator[Tuple[PartialArtifact, Optional[BuildSystemTable]]]
+
+        undetermined_artifacts = OrderedDict()  # type: OrderedDict[PartialArtifact, float]
+        for artifact in artifacts:
+            if artifact.build_system_table:
+                yield artifact, artifact.build_system_table
+            elif artifact.url.is_wheel:
+                yield artifact, None
+            else:
+                if "file" == artifact.url.scheme:
+                    if os.path.isdir(artifact.url.path):
+                        cost = 0.0
+                    else:
+                        # For almost all source archives this value should be <= 1
+                        cost = os.path.getsize(artifact.url.path) / (5.0 * 1024 * 1024)
+                else:
+                    # We have no clue how big the archive is, but assume an internet fetch is 10
+                    # times more costly per byte than extraction from an archive alone is.
+ cost = 10.0 + undetermined_artifacts[artifact] = cost + + for artifact, build_system_table in iter_map_parallel( + inputs=undetermined_artifacts, + function=self._determine_build_system, + costing_function=lambda a: undetermined_artifacts[a], + result_render_function=lambda result: ( + cast("Tuple[PartialArtifact, Optional[BuildSystemTable]]", result)[0].url + ), + noun="artifact", + verb="extract build system", + verb_past="extracted build system", + ): + yield artifact, build_system_table + + def _determine_build_system(self, artifact): + # type: (PartialArtifact) -> Tuple[PartialArtifact, BuildSystemTable] + + if "file" == artifact.url.scheme and os.path.isdir(artifact.url.path): + return artifact, try_(load_build_system_table(artifact.url.path)) + + production_assert(is_sdist(artifact.url.path)) + if artifact.url.scheme == "file": + archive = artifact.url.path + else: + archive = ( + self.resolver.download_requirements( + requirements=[artifact.url.download_url], transitive=False + ) + .local_distributions[0] + .path + ) + return artifact, extract_build_system_table(archive) diff --git a/pex/resolve/configured_resolver.py b/pex/resolve/configured_resolver.py index 6e76e9e66..540a92356 100644 --- a/pex/resolve/configured_resolver.py +++ b/pex/resolve/configured_resolver.py @@ -10,7 +10,7 @@ from pex.resolve import lock_resolver from pex.resolve.lockfile.model import Lockfile from pex.resolve.resolver_configuration import PipConfiguration, ReposConfiguration, ResolverVersion -from pex.resolve.resolvers import Resolver, ResolveResult +from pex.resolve.resolvers import Downloaded, Resolver, ResolveResult from pex.result import try_ from pex.targets import Targets from pex.typing import TYPE_CHECKING @@ -115,3 +115,33 @@ def resolve_requirements( ), result_type=result_type, ) + + def download_requirements( + self, + requirements, # type: Iterable[str] + targets=Targets(), # type: Targets + pip_version=None, # type: Optional[PipVersionValue] + transitive=None, # 
type: Optional[bool] + extra_resolver_requirements=None, # type: Optional[Tuple[Requirement, ...]] + ): + # type: (...) -> Downloaded + return resolver.download( + targets=targets, + requirements=requirements, + allow_prereleases=False, + transitive=transitive if transitive is not None else self.pip_configuration.transitive, + indexes=self.pip_configuration.repos_configuration.indexes, + find_links=self.pip_configuration.repos_configuration.find_links, + resolver_version=self.pip_configuration.resolver_version, + network_configuration=self.pip_configuration.network_configuration, + build_configuration=self.pip_configuration.build_configuration, + max_parallel_jobs=self.pip_configuration.max_jobs, + pip_version=pip_version or self.pip_configuration.version, + resolver=self, + use_pip_config=self.pip_configuration.use_pip_config, + extra_pip_requirements=( + extra_resolver_requirements + if extra_resolver_requirements is not None + else self.pip_configuration.extra_requirements + ), + ) diff --git a/pex/resolve/lock_resolver.py b/pex/resolve/lock_resolver.py index a7277f57c..47eebf378 100644 --- a/pex/resolve/lock_resolver.py +++ b/pex/resolve/lock_resolver.py @@ -3,28 +3,222 @@ from __future__ import absolute_import +import hashlib +import os +import tarfile +from collections import OrderedDict, defaultdict + from pex.auth import PasswordDatabase, PasswordEntry +from pex.build_system import BuildSystem, BuildSystemTable +from pex.cache.dirs import CacheDir +from pex.common import open_zip, safe_mkdtemp from pex.dependency_configuration import DependencyConfiguration -from pex.dist_metadata import Requirement, is_wheel +from pex.dist_metadata import Distribution, Requirement, is_sdist, is_tar_sdist, is_wheel +from pex.exceptions import production_assert +from pex.fingerprinted_distribution import FingerprintedDistribution +from pex.interpreter import PythonInterpreter from pex.network_configuration import NetworkConfiguration -from pex.pep_427 import InstallableType 
+from pex.pep_427 import InstallableType, install_wheel_chroot from pex.pip.tool import PackageIndexConfiguration from pex.pip.version import PipVersionValue from pex.resolve.lock_downloader import LockDownloader -from pex.resolve.locked_resolve import LocalProjectArtifact +from pex.resolve.locked_resolve import ( + DownloadableArtifact, + LocalProjectArtifact, + LockedResolve, + Resolved, +) +from pex.resolve.lockfile.download_manager import DownloadedArtifact from pex.resolve.lockfile.model import Lockfile -from pex.resolve.lockfile.subset import subset +from pex.resolve.lockfile.subset import subset, subset_for_target from pex.resolve.requirement_configuration import RequirementConfiguration from pex.resolve.resolver_configuration import BuildConfiguration, ResolverVersion -from pex.resolve.resolvers import Resolver, ResolveResult +from pex.resolve.resolvers import ResolvedDistribution, Resolver, ResolveResult from pex.resolver import BuildAndInstallRequest, BuildRequest, InstallRequest from pex.result import Error, try_ -from pex.targets import Targets +from pex.sorted_tuple import SortedTuple +from pex.targets import Target, Targets from pex.tracer import TRACER -from pex.typing import TYPE_CHECKING +from pex.typing import TYPE_CHECKING, cast +from pex.util import CacheHelper if TYPE_CHECKING: - from typing import Iterable, Optional, Sequence, Tuple, Union + from typing import DefaultDict, Dict, Iterable, List, Optional, Sequence, Tuple, Union + + import attr # vendor:skip +else: + from pex.third_party import attr + + +@attr.s(frozen=True) +class LockedSourceDistribution(object): + target = attr.ib() # type: Target + source_artifact = attr.ib() # type: DownloadedArtifact + build_system_table = attr.ib() # type: BuildSystemTable + locked_resolves = attr.ib() # type: Tuple[LockedResolve, ...] 
+ + +def build_locked_source_distribution( + locked_source_distribution, # type: LockedSourceDistribution + install_requests, # type: Iterable[InstallRequest] + result_type=InstallableType.INSTALLED_WHEEL_CHROOT, # type: InstallableType.Value +): + # type: (...) -> Union[ResolvedDistribution, Error] + + installed_wheels_dir = CacheDir.INSTALLED_WHEELS.path() + build_system_distributions = [] # type: List[Distribution] + for install_request in install_requests: + install_result = install_request.result(installed_wheels_dir) + installed_wheel = install_wheel_chroot( + wheel_path=install_request.wheel_path, destination=install_result.build_chroot + ) + build_system_distributions.append(Distribution.load(installed_wheel.prefix_dir)) + + result = BuildSystem.create( + interpreter=PythonInterpreter.get(), + requires=locked_source_distribution.build_system_table.requires, + resolved=build_system_distributions, + build_backend=locked_source_distribution.build_system_table.build_backend, + backend_path=locked_source_distribution.build_system_table.backend_path, + ) + if isinstance(result, Error): + return result + + source_artifact_path = locked_source_distribution.source_artifact.path + if is_sdist(source_artifact_path): + chroot = safe_mkdtemp() + if is_tar_sdist(source_artifact_path): + with tarfile.open(source_artifact_path) as tar_fp: + tar_fp.extractall(chroot) + else: + with open_zip(source_artifact_path) as zip_fp: + zip_fp.extractall(chroot) + for root, _, files in os.walk(chroot, topdown=True): + if any(f in ("setup.py", "setup.cfg", "pyproject.toml") for f in files): + project_directory = root + break + else: + return Error("TODO(John Sirois): XXX: Can't happen!") + else: + project_directory = source_artifact_path + + build_dir = os.path.join(safe_mkdtemp(), "build") + os.mkdir(build_dir) + spawned_job = try_( + result.invoke_build_hook( + project_directory, + hook_method="build_wheel", + hook_args=[build_dir], + ) + ) + distribution = spawned_job.map(lambda _: 
Distribution.load(build_dir)).await_result() + build_wheel_fingerprint = CacheHelper.hash(distribution.location, hasher=hashlib.sha256) + if result_type is InstallableType.INSTALLED_WHEEL_CHROOT: + install_request = InstallRequest( + target=locked_source_distribution.target, + wheel_path=distribution.location, + fingerprint=build_wheel_fingerprint, + ) + install_result = install_request.result(installed_wheels_dir) + installed_wheel = install_wheel_chroot( + wheel_path=install_request.wheel_path, destination=install_result.build_chroot + ) + distribution = Distribution.load(installed_wheel.prefix_dir) + + return ResolvedDistribution( + target=locked_source_distribution.target, + fingerprinted_distribution=FingerprintedDistribution( + distribution=distribution, fingerprint=build_wheel_fingerprint + ), + direct_requirements=SortedTuple(), + ) + + +def build_locked_source_distributions( + locked_source_distributions, # type: Sequence[LockedSourceDistribution] + lock_downloader, # type: LockDownloader + result_type=InstallableType.INSTALLED_WHEEL_CHROOT, # type: InstallableType.Value + build_configuration=BuildConfiguration(), # type: BuildConfiguration + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) 
-> Union[Iterable[ResolvedDistribution], Error] + + downloadable_artifacts_by_locked_source_distribution = ( + {} + ) # type: Dict[LockedSourceDistribution, Tuple[DownloadableArtifact, ...]] + subset_errors = OrderedDict() # type: OrderedDict[LockedSourceDistribution, Tuple[Error, ...]] + for locked_source_distribution in locked_source_distributions: + subset_result = subset_for_target( + target=locked_source_distribution.target, + locked_resolves=locked_source_distribution.locked_resolves, + requirements_to_resolve=tuple( + Requirement.parse(req) + for req in locked_source_distribution.build_system_table.requires + ), + build_configuration=build_configuration, + dependency_configuration=dependency_configuration, + ) + if isinstance(subset_result, Resolved): + downloadable_artifacts_by_locked_source_distribution[ + locked_source_distribution + ] = subset_result.downloadable_artifacts + elif isinstance(subset_result, tuple) and subset_result: + subset_errors[locked_source_distribution] = subset_result + if subset_errors: + return Error("TODO(John Sirois): XXX: build a subset errors message") + + downloaded_artifacts = try_( + lock_downloader.download_artifacts( + tuple( + (downloadable_artifact, locked_source_distribution.target) + for locked_source_distribution, downloadable_artifacts in downloadable_artifacts_by_locked_source_distribution.items() + for downloadable_artifact in downloadable_artifacts + ) + ) + ) + install_requests_by_locked_source_distribution = defaultdict( + list + ) # type: DefaultDict[LockedSourceDistribution, List[InstallRequest]] + resolve_errors = defaultdict( + list + ) # type: DefaultDict[LockedSourceDistribution, List[DownloadedArtifact]] + for ( + locked_source_distribution, + downloadable_artifacts, + ) in downloadable_artifacts_by_locked_source_distribution.items(): + for downloadable_artifact in downloadable_artifacts: + downloaded_artifact = downloaded_artifacts[downloadable_artifact] + if is_wheel(downloaded_artifact.path): + 
install_requests_by_locked_source_distribution[locked_source_distribution].append( + InstallRequest( + target=locked_source_distribution.target, + wheel_path=downloaded_artifact.path, + fingerprint=downloaded_artifact.fingerprint, + ) + ) + else: + resolve_errors[locked_source_distribution].append(downloaded_artifact) + if resolve_errors: + return Error("TODO(John Sirois): XXX: build a resolve errors message") + + # TODO(John Sirois): now we have a list of install requests needed per each source distribution + # build system, parallelize install + create pip venv + build wheel + built_distributions = [] # type: List[ResolvedDistribution] + build_errors = [] # type: List[Error] + for ( + locked_source_distribution, + install_requests, + ) in install_requests_by_locked_source_distribution.items(): + build_result = build_locked_source_distribution( + locked_source_distribution, install_requests, result_type + ) + if isinstance(build_result, ResolvedDistribution): + built_distributions.append(build_result) + else: + build_errors.append(build_result) + if build_errors: + return Error("TODO(John Sirois): XXX: build a build errors message") + return built_distributions def resolve_from_lock( @@ -102,8 +296,9 @@ def resolve_from_lock( return downloaded_artifacts with TRACER.timed("Categorizing {} downloaded artifacts".format(len(downloaded_artifacts))): - build_requests = [] - install_requests = [] + build_requests = [] # type: List[BuildRequest] + locked_build_requests = [] # type: List[LockedSourceDistribution] + install_requests = [] # type: List[InstallRequest] for resolved_subset in subset_result.subsets: for downloadable_artifact in resolved_subset.resolved.downloadable_artifacts: downloaded_artifact = downloaded_artifacts[downloadable_artifact] @@ -115,6 +310,20 @@ def resolve_from_lock( fingerprint=downloaded_artifact.fingerprint, ) ) + elif lock.lock_build_systems: + production_assert(downloadable_artifact.build_system_table is not None) + build_system_table = 
cast( + BuildSystemTable, downloadable_artifact.build_system_table + ) + locked_build_system_resolves = lock.build_systems[build_system_table] + locked_build_requests.append( + LockedSourceDistribution( + target=resolved_subset.target, + source_artifact=downloaded_artifact, + build_system_table=build_system_table, + locked_resolves=locked_build_system_resolves, + ) + ) else: build_requests.append( BuildRequest( @@ -123,12 +332,30 @@ def resolve_from_lock( fingerprint=downloaded_artifact.fingerprint, ) ) + build_request_count = len(build_requests) + locked_build_request_count = len(locked_build_requests) + production_assert( + ((build_request_count > 0) ^ (locked_build_request_count > 0)) + or (build_request_count == 0 and locked_build_request_count == 0) + ) with TRACER.timed( "Building {} artifacts and installing {}".format( - len(build_requests), len(build_requests) + len(install_requests) + build_request_count, build_request_count + len(install_requests) ) ): + distributions = list( + try_( + build_locked_source_distributions( + locked_build_requests, + lock_downloader, + result_type=result_type, + build_configuration=build_configuration, + dependency_configuration=dependency_configuration, + ) + ) + ) + build_and_install_request = BuildAndInstallRequest( build_requests=build_requests, install_requests=install_requests, @@ -163,7 +390,7 @@ def resolve_from_lock( # `LockedResolve.resolve` above and need not waste time (~O(100ms)) doing this again. 
ignore_errors = True - distributions = ( + distributions.extend( build_and_install_request.install_distributions( ignore_errors=ignore_errors, max_parallel_jobs=max_parallel_jobs, diff --git a/pex/resolve/locked_resolve.py b/pex/resolve/locked_resolve.py index a57edd272..61119ad91 100644 --- a/pex/resolve/locked_resolve.py +++ b/pex/resolve/locked_resolve.py @@ -8,6 +8,7 @@ from collections import OrderedDict, defaultdict, deque from functools import total_ordering +from pex.build_system import BuildSystemTable from pex.common import pluralize from pex.dependency_configuration import DependencyConfiguration from pex.dist_metadata import DistMetadata, Requirement, is_sdist, is_wheel @@ -88,6 +89,7 @@ class LockConfiguration(object): requires_python = attr.ib(default=()) # type: Tuple[str, ...] target_systems = attr.ib(default=()) # type: Tuple[TargetSystem.Value, ...] elide_unused_requires_dist = attr.ib(default=False) # type: bool + lock_build_systems = attr.ib(default=False) # type: bool @requires_python.validator @target_systems.validator @@ -117,13 +119,16 @@ def from_artifact_url( artifact_url, # type: ArtifactURL fingerprint, # type: Fingerprint verified=False, # type: bool + build_system_table=None, # type: Optional[BuildSystemTable] ): # type: (...) 
-> Union[FileArtifact, LocalProjectArtifact, VCSArtifact] if isinstance(artifact_url.scheme, VCSScheme): - return VCSArtifact.from_artifact_url( - artifact_url=artifact_url, + return VCSArtifact( + url=artifact_url, fingerprint=fingerprint, verified=verified, + vcs=artifact_url.scheme.vcs, + build_system_table=build_system_table, ) if "file" == artifact_url.scheme and os.path.isdir(artifact_url.path): @@ -133,6 +138,7 @@ def from_artifact_url( fingerprint=fingerprint, verified=verified, directory=directory, + build_system_table=build_system_table, ) filename = os.path.basename(artifact_url.path) @@ -141,6 +147,7 @@ def from_artifact_url( fingerprint=fingerprint, verified=verified, filename=filename, + build_system_table=build_system_table, ) @classmethod @@ -149,10 +156,14 @@ def from_url( url, # type: str fingerprint, # type: Fingerprint verified=False, # type: bool + build_system_table=None, # type: Optional[BuildSystemTable] ): # type: (...) -> Union[FileArtifact, LocalProjectArtifact, VCSArtifact] return cls.from_artifact_url( - artifact_url=ArtifactURL.parse(url), fingerprint=fingerprint, verified=verified + artifact_url=ArtifactURL.parse(url), + fingerprint=fingerprint, + verified=verified, + build_system_table=build_system_table, ) url = attr.ib() # type: ArtifactURL @@ -169,6 +180,19 @@ def __lt__(self, other): @attr.s(frozen=True, order=False) class FileArtifact(Artifact): filename = attr.ib() # type: str + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] + + @build_system_table.validator + def _validate_only_set_for_sdist( + self, + attribute, # type: Any + value, # type: Optional[BuildSystemTable] + ): + if value and not self.is_source: + raise ValueError( + "A build system table was provided but this is a whl artifact that does not need " + "to be built: {url}".format(url=self.url.raw_url) + ) @property def is_source(self): @@ -185,6 +209,7 @@ def parse_tags(self): @attr.s(frozen=True, order=False) class 
LocalProjectArtifact(Artifact): directory = attr.ib() # type: str + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] @property def is_source(self): @@ -194,28 +219,8 @@ def is_source(self): @attr.s(frozen=True, order=False) class VCSArtifact(Artifact): - @classmethod - def from_artifact_url( - cls, - artifact_url, # type: ArtifactURL - fingerprint, # type: Fingerprint - verified=False, # type: bool - ): - # type: (...) -> VCSArtifact - if not isinstance(artifact_url.scheme, VCSScheme): - raise ValueError( - "The given artifact URL is not that of a VCS artifact: {url}".format( - url=artifact_url.raw_url - ) - ) - return cls( - url=artifact_url, - fingerprint=fingerprint, - verified=verified, - vcs=artifact_url.scheme.vcs, - ) - vcs = attr.ib() # type: VCS.Value + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] @property def is_source(self): @@ -253,7 +258,7 @@ def create( return cls( pin=pin, artifact=artifact, - requires_dists=SortedTuple(requires_dists, key=str), + requires_dists=SortedTuple(requires_dists), requires_python=requires_python, additional_artifacts=SortedTuple(additional_artifacts), ) @@ -443,13 +448,18 @@ def create( return cls( pin=pin, artifact=artifact, - satisfied_direct_requirements=SortedTuple(satisfied_direct_requirements, key=str), + satisfied_direct_requirements=SortedTuple(satisfied_direct_requirements), ) pin = attr.ib() # type: Pin artifact = attr.ib() # type: Union[FileArtifact, LocalProjectArtifact, VCSArtifact] satisfied_direct_requirements = attr.ib(default=SortedTuple()) # type: SortedTuple[Requirement] + @property + def build_system_table(self): + # type: () -> Optional[BuildSystemTable] + return self.artifact.build_system_table + @attr.s(frozen=True) class Resolved(object): @@ -519,6 +529,11 @@ def most_specific(cls, resolves): if TYPE_CHECKING: + class BuildSystemOracle(Protocol): + def determine_build_systems(self, artifacts): + # type: (Iterable[PartialArtifact]) -> 
Iterator[Tuple[PartialArtifact, Optional[BuildSystemTable]]] + pass + class Fingerprinter(Protocol): def fingerprint(self, artifacts): # type: (Iterable[PartialArtifact]) -> Iterator[FileArtifact] @@ -532,11 +547,24 @@ def create( cls, resolved_requirements, # type: Iterable[ResolvedRequirement] dist_metadatas, # type: Iterable[DistMetadata] + build_system_oracle, # type: Optional[BuildSystemOracle] fingerprinter, # type: Fingerprinter platform_tag=None, # type: Optional[tags.Tag] ): # type: (...) -> LockedResolve + artifacts_to_lock = OrderedSet( + itertools.chain.from_iterable( + resolved_requirement.iter_artifacts_to_lock() + for resolved_requirement in resolved_requirements + ) + ) + build_system_table_by_partial_artifact = ( + dict(build_system_oracle.determine_build_systems(artifacts_to_lock)) + if build_system_oracle + else {} + ) + artifacts_to_fingerprint = OrderedSet( itertools.chain.from_iterable( resolved_requirement.iter_artifacts_to_fingerprint() @@ -569,6 +597,7 @@ def resolve_fingerprint(partial_artifact): artifact_url=partial_artifact.url, fingerprint=partial_artifact.fingerprint, verified=partial_artifact.verified, + build_system_table=build_system_table_by_partial_artifact.get(partial_artifact), ) dist_metadata_by_pin = { diff --git a/pex/resolve/locker.py b/pex/resolve/locker.py index 4991bb7dd..1bbe74adb 100644 --- a/pex/resolve/locker.py +++ b/pex/resolve/locker.py @@ -10,6 +10,8 @@ from collections import OrderedDict, defaultdict from pex import hashing +from pex.build_system import BuildSystemTable +from pex.build_system.pep_518 import load_build_system_table from pex.common import safe_mkdtemp from pex.compatibility import urlparse from pex.dist_metadata import ProjectNameAndVersion, Requirement @@ -23,6 +25,7 @@ from pex.pip.vcs import fingerprint_downloaded_vcs_archive from pex.pip.version import PipVersionValue from pex.requirements import ArchiveScheme, VCSRequirement, VCSScheme +from pex.resolve import build_systems from 
pex.resolve.locked_resolve import LockConfiguration, LockStyle, TargetSystem from pex.resolve.pep_691.fingerprint_service import FingerprintService from pex.resolve.pep_691.model import Endpoint @@ -34,6 +37,7 @@ ResolvedRequirement, ) from pex.resolve.resolvers import Resolver +from pex.result import try_ from pex.targets import Target from pex.typing import TYPE_CHECKING @@ -369,8 +373,13 @@ def analyze(self, line): artifact_url = build_result.url source_fingerprint = None # type: Optional[Fingerprint] verified = False + build_system_table = None # type: Optional[BuildSystemTable] if isinstance(artifact_url.scheme, VCSScheme): - source_fingerprint, archive_path = fingerprint_downloaded_vcs_archive( + ( + source_fingerprint, + build_system_table, + archive_path, + ) = fingerprint_downloaded_vcs_archive( download_dir=self._download_dir, project_name=str(build_result.pin.project_name), version=str(build_result.pin.version), @@ -390,6 +399,9 @@ def analyze(self, line): # machinery that finalizes a locks missing fingerprints will download the # artifact and hash it. 
if os.path.isfile(source_archive_path): + build_system_table = build_systems.extract_build_system_table( + source_archive_path + ) digest = Sha256() hashing.file_hash(source_archive_path, digest) source_fingerprint = Fingerprint.from_digest(digest) @@ -398,11 +410,15 @@ def analyze(self, line): elif "file" == artifact_url.scheme: digest = Sha256() if os.path.isfile(artifact_url.path): + build_system_table = build_systems.extract_build_system_table( + artifact_url.path + ) hashing.file_hash(artifact_url.path, digest) self._selected_path_to_pin[ os.path.basename(artifact_url.path) ] = build_result.pin else: + build_system_table = try_(load_build_system_table(artifact_url.path)) digest_local_project( directory=artifact_url.path, digest=digest, @@ -427,7 +443,10 @@ def analyze(self, line): self._resolved_requirements[build_result.pin] = ResolvedRequirement( pin=build_result.pin, artifact=PartialArtifact( - url=artifact_url, fingerprint=source_fingerprint, verified=verified + url=artifact_url, + fingerprint=source_fingerprint, + verified=verified, + build_system_table=build_system_table, ), additional_artifacts=tuple(additional_artifacts.values()), ) diff --git a/pex/resolve/lockfile/create.py b/pex/resolve/lockfile/create.py index 220092665..40d39dcf6 100644 --- a/pex/resolve/lockfile/create.py +++ b/pex/resolve/lockfile/create.py @@ -3,6 +3,7 @@ from __future__ import absolute_import +import itertools import os import shutil import tarfile @@ -11,10 +12,16 @@ from pex import hashing, resolver from pex.auth import PasswordDatabase -from pex.build_system import pep_517 +from pex.build_system import BuildSystemTable, pep_517 from pex.common import open_zip, pluralize, safe_mkdtemp from pex.dependency_configuration import DependencyConfiguration -from pex.dist_metadata import DistMetadata, ProjectNameAndVersion, is_tar_sdist, is_zip_sdist +from pex.dist_metadata import ( + Constraint, + DistMetadata, + ProjectNameAndVersion, + is_tar_sdist, + is_zip_sdist, +) from 
pex.fetcher import URLFetcher from pex.jobs import Job, Retain, SpawnedJob, execute_parallel from pex.orderedset import OrderedSet @@ -22,6 +29,7 @@ from pex.pip.download_observer import DownloadObserver from pex.pip.tool import PackageIndexConfiguration from pex.resolve import lock_resolver, locker, resolvers +from pex.resolve.build_systems import BuildSystems from pex.resolve.configured_resolver import ConfiguredResolver from pex.resolve.downloads import ArtifactDownloader from pex.resolve.locked_resolve import ( @@ -40,8 +48,8 @@ from pex.resolve.requirement_configuration import RequirementConfiguration from pex.resolve.resolved_requirement import Pin, ResolvedRequirement from pex.resolve.resolver_configuration import PipConfiguration -from pex.resolve.resolvers import Resolver -from pex.resolver import BuildRequest, Downloaded, ResolveObserver, WheelBuilder +from pex.resolve.resolvers import Downloaded, Resolver +from pex.resolver import BuildRequest, ResolveObserver, WheelBuilder from pex.result import Error, try_ from pex.targets import Target, Targets from pex.tracer import TRACER @@ -50,7 +58,7 @@ from pex.version import __version__ if TYPE_CHECKING: - from typing import DefaultDict, Dict, Iterable, List, Mapping, Optional, Tuple, Union + from typing import DefaultDict, Dict, Iterable, Iterator, List, Mapping, Optional, Tuple, Union import attr # vendor:skip @@ -334,6 +342,11 @@ def lock(self, downloaded): LockedResolve.create( resolved_requirements=resolved_requirements, dist_metadatas=dist_metadatas_by_target[target], + build_system_oracle=( + BuildSystems(resolver=self.resolver) + if self.lock_configuration.lock_build_systems + else None + ), fingerprinter=ArtifactDownloader( resolver=self.resolver, lock_configuration=self.lock_configuration, @@ -343,7 +356,7 @@ def lock(self, downloaded): ), platform_tag=( None - if self.lock_configuration.style == LockStyle.UNIVERSAL + if self.lock_configuration.style is LockStyle.UNIVERSAL else target.platform.tag ), 
) @@ -351,15 +364,21 @@ def lock(self, downloaded): ) -def create( +@attr.s(frozen=True) +class _LockResult(object): + requirements = attr.ib() # type: Tuple[ParsedRequirement, ...] + constraints = attr.ib() # type: Tuple[Constraint, ...] + locked_resolves = attr.ib() # type: Tuple[LockedResolve, ...] + + +def _lock( lock_configuration, # type: LockConfiguration requirement_configuration, # type: RequirementConfiguration targets, # type: Targets pip_configuration, # type: PipConfiguration dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration ): - # type: (...) -> Union[Lockfile, Error] - """Create a lock file for the given resolve configurations.""" + # type: (...) -> Union[_LockResult, Error] network_configuration = pip_configuration.network_configuration parsed_requirements = tuple(requirement_configuration.parse_requirements(network_configuration)) @@ -443,21 +462,165 @@ def create( ) create_lock_download_manager.store_all() + return _LockResult(parsed_requirements, constraints, locked_resolves) + + +def _lock_build_system( + build_system_table, # type: BuildSystemTable + lock_configuration, # type: LockConfiguration + targets, # type: Targets + pip_configuration, # type: PipConfiguration + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) 
-> Union[Tuple[BuildSystemTable, Tuple[LockedResolve, ...]], Error] + + requirement_configuration = RequirementConfiguration(requirements=build_system_table.requires) + result = _lock( + lock_configuration, + requirement_configuration, + targets, + pip_configuration, + dependency_configuration=dependency_configuration, + ) + if isinstance(result, Error): + return result + + source_artifacts = OrderedSet( + artifact.url.download_url + for artifact in itertools.chain.from_iterable( + locked_requirement.iter_artifacts() + for locked_resolve in result.locked_resolves + for locked_requirement in locked_resolve.locked_requirements + ) + if not artifact.url.is_wheel + ) + if source_artifacts: + return Error( + "Failed to lock build backend {build_backend} which requires {requires}.\n" + "The following {packages} had source artifacts locked and recursive build system " + "locking is not supported:\n" + "{source_artifacts}".format( + build_backend=build_system_table.build_backend, + requires=", ".join(build_system_table.requires), + packages=pluralize(source_artifacts, "package"), + source_artifacts="\n".join(source_artifacts), + ) + ) + return build_system_table, result.locked_resolves + + +def _lock_build_systems( + locked_resolves, # type: Tuple[LockedResolve, ...] + lock_configuration, # type: LockConfiguration + targets, # type: Targets + pip_configuration, # type: PipConfiguration + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) 
-> Iterator[Union[Tuple[BuildSystemTable, Tuple[LockedResolve, ...]], Error]] + + if not lock_configuration.lock_build_systems: + return + + build_systems = OrderedSet( + artifact.build_system_table + for artifact in itertools.chain.from_iterable( + locked_requirement.iter_artifacts() + for locked_resolve in locked_resolves + for locked_requirement in locked_resolve.locked_requirements + ) + if artifact.build_system_table + ) + if not build_systems: + return + + build_system_pip_config = attr.evolve( + pip_configuration, + build_configuration=attr.evolve( + pip_configuration.build_configuration, allow_builds=False, allow_wheels=True + ), + ) + # TODO(John Sirois): Re-introduce iter_map_parallel after sorting out nested + # multiprocessing.Pool illegal usage. Currently this nets: + # File "/home/jsirois/dev/pex-tool/pex/pex/resolve/lockfile/create.py", line 588, in create + # for result in _lock_build_systems( + # File "/home/jsirois/dev/pex-tool/pex/pex/jobs.py", line 787, in iter_map_parallel + # for pid, result, elapsed_secs in pool.imap_unordered(apply_function, input_items): + # File "/home/jsirois/.pyenv/versions/3.11.10/lib/python3.11/multiprocessing/pool.py", line 873, in next + # raise value + # AssertionError: daemonic processes are not allowed to have children + for build_system_table in build_systems: + yield _lock_build_system( + build_system_table=build_system_table, + lock_configuration=lock_configuration, + targets=targets, + pip_configuration=build_system_pip_config, + dependency_configuration=dependency_configuration, + ) + + +def create( + lock_configuration, # type: LockConfiguration + requirement_configuration, # type: RequirementConfiguration + targets, # type: Targets + pip_configuration, # type: PipConfiguration + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) 
-> Union[Lockfile, Error] + """Create a lock file for the given resolve configurations.""" + + lock_result = try_( + _lock( + lock_configuration, + requirement_configuration, + targets, + pip_configuration, + dependency_configuration=dependency_configuration, + ) + ) + + build_system_lock_errors = [] # type: List[str] + build_systems = {} # type: Dict[BuildSystemTable, Tuple[LockedResolve, ...]] + for result in _lock_build_systems( + locked_resolves=lock_result.locked_resolves, + lock_configuration=lock_configuration, + targets=targets, + pip_configuration=pip_configuration, + dependency_configuration=dependency_configuration, + ): + if isinstance(result, Error): + build_system_lock_errors.append(str(result)) + else: + build_system_table, locked_resolves = result + build_systems[build_system_table] = locked_resolves + if build_system_lock_errors: + return Error( + "Failed to lock {count} build {systems}:\n{errors}".format( + count=len(build_system_lock_errors), + systems=pluralize(build_system_lock_errors, "system"), + errors="\n".join( + "{index}. 
{error}".format(index=index, error=error) + for index, error in enumerate(build_system_lock_errors, start=1) + ), + ) + ) + lock = Lockfile.create( pex_version=__version__, style=lock_configuration.style, requires_python=lock_configuration.requires_python, target_systems=lock_configuration.target_systems, + lock_build_systems=lock_configuration.lock_build_systems, pip_version=pip_configuration.version, resolver_version=pip_configuration.resolver_version, - requirements=parsed_requirements, - constraints=constraints, + requirements=lock_result.requirements, + constraints=lock_result.constraints, allow_prereleases=pip_configuration.allow_prereleases, build_configuration=pip_configuration.build_configuration, transitive=pip_configuration.transitive, excluded=dependency_configuration.excluded, overridden=dependency_configuration.all_overrides(), - locked_resolves=locked_resolves, + locked_resolves=lock_result.locked_resolves, + build_systems=build_systems, elide_unused_requires_dist=lock_configuration.elide_unused_requires_dist, ) @@ -474,11 +637,11 @@ def create( lock_resolver.resolve_from_lock( targets=check_targets, lock=lock, - resolver=configured_resolver, + resolver=ConfiguredResolver(pip_configuration=pip_configuration), indexes=pip_configuration.repos_configuration.indexes, find_links=pip_configuration.repos_configuration.find_links, resolver_version=pip_configuration.resolver_version, - network_configuration=network_configuration, + network_configuration=pip_configuration.network_configuration, password_entries=pip_configuration.repos_configuration.password_entries, build_configuration=pip_configuration.build_configuration, transitive=pip_configuration.transitive, diff --git a/pex/resolve/lockfile/json_codec.py b/pex/resolve/lockfile/json_codec.py index ddfe4bde6..0fa464365 100644 --- a/pex/resolve/lockfile/json_codec.py +++ b/pex/resolve/lockfile/json_codec.py @@ -4,8 +4,10 @@ from __future__ import absolute_import import json +from collections import 
defaultdict from pex import compatibility +from pex.build_system import BuildSystemTable from pex.dist_metadata import Requirement, RequirementParseError from pex.enum import Enum from pex.pep_440 import Version @@ -13,10 +15,13 @@ from pex.pip.version import PipVersion from pex.resolve.locked_resolve import ( Artifact, + FileArtifact, + LocalProjectArtifact, LockedRequirement, LockedResolve, LockStyle, TargetSystem, + VCSArtifact, ) from pex.resolve.lockfile.model import Lockfile from pex.resolve.path_mappings import PathMappings @@ -31,6 +36,7 @@ from typing import ( Any, Container, + DefaultDict, Dict, List, Mapping, @@ -340,6 +346,7 @@ def assemble_tag( requires_python=get("requires_python", list), target_systems=target_systems, elide_unused_requires_dist=elide_unused_requires_dist, + lock_build_systems=get("lock_build_systems", bool, optional=True) or False, pip_version=get_enum_value( PipVersion, "pip_version", @@ -386,7 +393,59 @@ def as_json_data( path_mappings=PathMappings(), # type: PathMappings ): # type: (...) 
-> Dict[str, Any] - return { + + build_systems_by_backend = defaultdict( + dict + ) # type: DefaultDict[str, Dict[BuildSystemTable, str]] + + def serialize_artifact(artifact): + # type: (Union[FileArtifact, LocalProjectArtifact, VCSArtifact]) -> Dict[str, Any] + + artifact_data = { + "url": path_mappings.maybe_canonicalize(artifact.url.download_url), + "algorithm": artifact.fingerprint.algorithm, + "hash": artifact.fingerprint.hash, + } + if artifact.build_system_table: + backend = artifact.build_system_table.build_backend + tables = build_systems_by_backend[backend] + artifact_data["build_system"] = tables.setdefault( + artifact.build_system_table, + "{backend}-{index}".format(backend=backend, index=len(tables)), + ) + return artifact_data + + def serialize_locked_resolve(locked_resolve): + # type: (LockedResolve) -> Dict[str, Any] + return { + "platform_tag": [ + locked_resolve.platform_tag.interpreter, + locked_resolve.platform_tag.abi, + locked_resolve.platform_tag.platform, + ] + if locked_resolve.platform_tag + else None, + "locked_requirements": [ + { + "project_name": str(req.pin.project_name), + # N.B.: We store the raw version so that `===` can work as intended against + # the un-normalized form of versions that are non-legacy and thus + # normalizable. 
+ "version": req.pin.version.raw, + "requires_dists": [ + path_mappings.maybe_canonicalize(str(dependency)) + for dependency in req.requires_dists + ], + "requires_python": str(req.requires_python) if req.requires_python else None, + "artifacts": [ + serialize_artifact(artifact) for artifact in req.iter_artifacts() + ], + } + for req in locked_resolve.locked_requirements + ], + } + + lock_data = { "pex_version": lockfile.pex_version, "style": str(lockfile.style), "requires_python": list(lockfile.requires_python), @@ -411,43 +470,24 @@ def as_json_data( "excluded": [str(exclude) for exclude in lockfile.excluded], "overridden": [str(override) for override in lockfile.overridden], "locked_resolves": [ - { - "platform_tag": [ - locked_resolve.platform_tag.interpreter, - locked_resolve.platform_tag.abi, - locked_resolve.platform_tag.platform, - ] - if locked_resolve.platform_tag - else None, - "locked_requirements": [ - { - "project_name": str(req.pin.project_name), - # N.B.: We store the raw version so that `===` can work as intended against - # the un-normalized form of versions that are non-legacy and thus - # normalizable. 
- "version": req.pin.version.raw, - "requires_dists": [ - path_mappings.maybe_canonicalize(str(dependency)) - for dependency in req.requires_dists - ], - "requires_python": str(req.requires_python) - if req.requires_python - else None, - "artifacts": [ - { - "url": path_mappings.maybe_canonicalize(artifact.url.download_url), - "algorithm": artifact.fingerprint.algorithm, - "hash": artifact.fingerprint.hash, - } - for artifact in req.iter_artifacts() - ], - } - for req in locked_resolve.locked_requirements - ], - } - for locked_resolve in lockfile.locked_resolves + serialize_locked_resolve(locked_resolve) for locked_resolve in lockfile.locked_resolves ], "path_mappings": { path_mapping.name: path_mapping.description for path_mapping in path_mappings.mappings }, } + if build_systems_by_backend: + lock_data["build_systems"] = { + build_system_id: { + "build_backend": build_system_table.build_backend, + "requires": build_system_table.requires, + "backend_path": build_system_table.backend_path, + "locked_resolves": [ + serialize_locked_resolve(locked_resolve) + for locked_resolve in lockfile.build_systems[build_system_table] + ], + } + for build_system in build_systems_by_backend.values() + for build_system_table, build_system_id in build_system.items() + } + return lock_data diff --git a/pex/resolve/lockfile/model.py b/pex/resolve/lockfile/model.py index 36293fa34..244b54ba7 100644 --- a/pex/resolve/lockfile/model.py +++ b/pex/resolve/lockfile/model.py @@ -5,6 +5,7 @@ import os +from pex.build_system import BuildSystemTable from pex.dependency_configuration import DependencyConfiguration from pex.dist_metadata import Constraint, Requirement from pex.orderedset import OrderedSet @@ -43,6 +44,7 @@ def create( style, # type: LockStyle.Value requires_python, # type: Iterable[str] target_systems, # type: Iterable[TargetSystem.Value] + lock_build_systems, # type: bool requirements, # type: Iterable[Union[Requirement, ParsedRequirement]] constraints, # type: 
Iterable[Constraint] allow_prereleases, # type: bool @@ -51,6 +53,7 @@ def create( excluded, # type: Iterable[Requirement] overridden, # type: Iterable[Requirement] locked_resolves, # type: Iterable[LockedResolve] + build_systems=None, # type: Optional[Mapping[BuildSystemTable, Iterable[LockedResolve]]] source=None, # type: Optional[str] pip_version=None, # type: Optional[PipVersionValue] resolver_version=None, # type: Optional[ResolverVersion.Value] @@ -103,10 +106,11 @@ def extract_requirement(req): requires_python=SortedTuple(requires_python), target_systems=SortedTuple(target_systems), elide_unused_requires_dist=elide_unused_requires_dist, + lock_build_systems=lock_build_systems, pip_version=pip_ver, resolver_version=resolver_version or ResolverVersion.default(pip_ver), - requirements=SortedTuple(resolve_requirements, key=str), - constraints=SortedTuple(constraints, key=str), + requirements=SortedTuple(resolve_requirements), + constraints=SortedTuple(constraints), allow_prereleases=allow_prereleases, allow_wheels=build_configuration.allow_wheels, only_wheels=SortedTuple(build_configuration.only_wheels), @@ -127,6 +131,10 @@ def extract_requirement(req): ) for locked_resolve in locked_resolves ), + build_systems={ + build_system_table: SortedTuple(locked_resolves) + for build_system_table, locked_resolves in (build_systems or {}).items() + }, local_project_requirement_mapping=requirement_by_local_project_directory, source=source, ) @@ -136,6 +144,7 @@ def extract_requirement(req): requires_python = attr.ib() # type: SortedTuple[str] target_systems = attr.ib() # type: SortedTuple[TargetSystem.Value] elide_unused_requires_dist = attr.ib() # type: bool + lock_build_systems = attr.ib() # type: bool pip_version = attr.ib() # type: PipVersionValue resolver_version = attr.ib() # type: ResolverVersion.Value requirements = attr.ib() # type: SortedTuple[Requirement] @@ -153,6 +162,7 @@ def extract_requirement(req): excluded = attr.ib() # type: SortedTuple[Requirement] 
overridden = attr.ib() # type: SortedTuple[Requirement] locked_resolves = attr.ib() # type: SortedTuple[LockedResolve] + build_systems = attr.ib() # type: Mapping[BuildSystemTable, SortedTuple[LockedResolve]] local_project_requirement_mapping = attr.ib(eq=False) # type: Mapping[str, Requirement] source = attr.ib(default=None, eq=False) # type: Optional[str] @@ -163,6 +173,7 @@ def lock_configuration(self): requires_python=self.requires_python, target_systems=self.target_systems, elide_unused_requires_dist=self.elide_unused_requires_dist, + lock_build_systems=self.lock_build_systems, ) def build_configuration(self): diff --git a/pex/resolve/lockfile/subset.py b/pex/resolve/lockfile/subset.py index 0d8e3a49d..7505a8b68 100644 --- a/pex/resolve/lockfile/subset.py +++ b/pex/resolve/lockfile/subset.py @@ -12,7 +12,7 @@ from pex.network_configuration import NetworkConfiguration from pex.orderedset import OrderedSet from pex.requirements import LocalProjectRequirement, parse_requirement_strings -from pex.resolve.locked_resolve import Resolved +from pex.resolve.locked_resolve import LockedResolve, Resolved from pex.resolve.lockfile.model import Lockfile from pex.resolve.requirement_configuration import RequirementConfiguration from pex.resolve.resolver_configuration import BuildConfiguration @@ -43,6 +43,41 @@ class SubsetResult(object): subsets = attr.ib() # type: Tuple[Subset, ...] +def subset_for_target( + target, # type: Target + locked_resolves, # type: Iterable[LockedResolve] + requirements_to_resolve, # type: Iterable[Requirement] + constraints=(), # type: Iterable[Requirement] + source=None, # type: Optional[str] + build_configuration=BuildConfiguration(), # type: BuildConfiguration + transitive=True, # type: bool + include_all_matches=False, # type: bool + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) 
-> Union[Resolved, Tuple[Error, ...]] + resolveds = [] + errors = [] + for locked_resolve in locked_resolves: + resolve_result = locked_resolve.resolve( + target, + requirements_to_resolve, + constraints=constraints, + source=source, + build_configuration=build_configuration, + transitive=transitive, + include_all_matches=include_all_matches, + dependency_configuration=dependency_configuration, + # TODO(John Sirois): Plumb `--ignore-errors` to support desired but technically + # invalid `pip-legacy-resolver` locks: + # https://github.com/pex-tool/pex/issues/1652 + ) + if isinstance(resolve_result, Resolved): + resolveds.append(resolve_result) + else: + errors.append(resolve_result) + return Resolved.most_specific(resolveds) if resolveds else tuple(errors) + + def subset( targets, # type: Targets lock, # type: Lockfile @@ -105,31 +140,21 @@ def subset( ) ): for target in targets.unique_targets(): - resolveds = [] - errors = [] - for locked_resolve in lock.locked_resolves: - resolve_result = locked_resolve.resolve( - target, - requirements_to_resolve, - constraints=constraints, - source=lock.source, - build_configuration=build_configuration, - transitive=transitive, - include_all_matches=include_all_matches, - dependency_configuration=dependency_configuration, - # TODO(John Sirois): Plumb `--ignore-errors` to support desired but technically - # invalid `pip-legacy-resolver` locks: - # https://github.com/pex-tool/pex/issues/1652 - ) - if isinstance(resolve_result, Resolved): - resolveds.append(resolve_result) - else: - errors.append(resolve_result) - - if resolveds: - resolved_by_target[target] = Resolved.most_specific(resolveds) - elif errors: - errors_by_target[target] = tuple(errors) + result = subset_for_target( + target, + locked_resolves=lock.locked_resolves, + requirements_to_resolve=requirements_to_resolve, + constraints=constraints, + source=lock.source, + build_configuration=build_configuration, + transitive=transitive, + 
include_all_matches=include_all_matches, + dependency_configuration=dependency_configuration, + ) + if isinstance(result, Resolved): + resolved_by_target[target] = result + elif len(result) > 0: + errors_by_target[target] = result if errors_by_target: return Error( diff --git a/pex/resolve/resolved_requirement.py b/pex/resolve/resolved_requirement.py index eb63daa6c..0571043fd 100644 --- a/pex/resolve/resolved_requirement.py +++ b/pex/resolve/resolved_requirement.py @@ -6,6 +6,7 @@ import hashlib from pex import hashing +from pex.build_system import BuildSystemTable from pex.compatibility import url_unquote, urlparse from pex.dist_metadata import ProjectNameAndVersion, Requirement, is_wheel from pex.hashing import HashlibHasher @@ -167,6 +168,7 @@ class PartialArtifact(object): url = attr.ib(converter=_convert_url) # type: ArtifactURL fingerprint = attr.ib(default=None) # type: Optional[Fingerprint] verified = attr.ib(default=False) # type: bool + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] @attr.s(frozen=True) @@ -186,3 +188,8 @@ def iter_artifacts_to_fingerprint(self): for artifact in self.iter_artifacts(): if not artifact.fingerprint: yield artifact + + def iter_artifacts_to_lock(self): + for artifact in self.iter_artifacts(): + if not artifact.url.is_wheel: + yield artifact diff --git a/pex/resolve/resolvers.py b/pex/resolve/resolvers.py index d838b0b3f..d6804745d 100644 --- a/pex/resolve/resolvers.py +++ b/pex/resolve/resolvers.py @@ -1,17 +1,19 @@ # Copyright 2022 Pex project contributors. # Licensed under the Apache License, Version 2.0 (see LICENSE). 
-from __future__ import absolute_import +from __future__ import absolute_import, print_function +import hashlib import itertools import os +import zipfile from abc import abstractmethod from collections import OrderedDict, defaultdict -from pex import pex_warnings +from pex import pex_warnings, targets from pex.common import pluralize from pex.dependency_configuration import DependencyConfiguration -from pex.dist_metadata import Distribution, Requirement +from pex.dist_metadata import Distribution, Requirement, is_wheel from pex.fingerprinted_distribution import FingerprintedDistribution from pex.pep_427 import InstallableType from pex.pep_503 import ProjectName @@ -20,6 +22,7 @@ from pex.sorted_tuple import SortedTuple from pex.targets import AbbreviatedPlatform, Target, Targets from pex.typing import TYPE_CHECKING +from pex.util import CacheHelper if TYPE_CHECKING: from typing import DefaultDict, Iterable, List, Optional, Tuple @@ -237,6 +240,48 @@ class ResolveResult(object): type = attr.ib() # type: InstallableType.Value +def fingerprint_path(path): + # type: (str) -> str + + # We switched from sha1 to sha256 at the transition from using `pip install --target` to + # `pip install --prefix` to serve two purposes: + # 1. Insulate the new installation scheme from the old. + # 2. Move past sha1 which was shown to have practical collision attacks in 2019. + # + # The installation scheme switch was the primary purpose and switching hashes proved a pragmatic + # insulation. If the `pip install --prefix` re-arrangement scheme evolves, then some other + # option than switching hashing algorithms will be needed, like post-fixing a running version + # integer or just mixing one into the hashed content. + # + # See: https://github.com/pex-tool/pex/issues/1655 for a general overview of these cache + # structure concerns. 
+ hasher = hashlib.sha256 + + if os.path.isdir(path): + return CacheHelper.dir_hash(path, hasher=hasher) + return CacheHelper.hash(path, hasher=hasher) + + +@attr.s(frozen=True) +class LocalDistribution(object): + path = attr.ib() # type: str + fingerprint = attr.ib() # type: str + target = attr.ib(factory=targets.current) # type: Target + + @fingerprint.default + def _calculate_fingerprint(self): + return fingerprint_path(self.path) + + @property + def is_wheel(self): + return is_wheel(self.path) and zipfile.is_zipfile(self.path) + + +@attr.s(frozen=True) +class Downloaded(object): + local_distributions = attr.ib() # type: Tuple[LocalDistribution, ...] + + class Resolver(object): @abstractmethod def is_default_repos(self): @@ -270,3 +315,15 @@ def resolve_requirements( ): # type: (...) -> ResolveResult raise NotImplementedError() + + @abstractmethod + def download_requirements( + self, + requirements, # type: Iterable[str] + targets=Targets(), # type: Targets + pip_version=None, # type: Optional[PipVersionValue] + transitive=None, # type: Optional[bool] + extra_resolver_requirements=None, # type: Optional[Tuple[Requirement, ...]] + ): + # type: (...) 
-> Downloaded + raise NotImplementedError() diff --git a/pex/resolver.py b/pex/resolver.py index fdfac75aa..1af16ac39 100644 --- a/pex/resolver.py +++ b/pex/resolver.py @@ -6,14 +6,12 @@ import functools import glob -import hashlib import itertools import os import zipfile from abc import abstractmethod from collections import OrderedDict, defaultdict -from pex import targets from pex.atomic_directory import AtomicDirectory, atomic_directory from pex.auth import PasswordEntry from pex.cache.dirs import BuiltWheelDir, CacheDir @@ -37,18 +35,20 @@ from pex.resolve.requirement_configuration import RequirementConfiguration from pex.resolve.resolver_configuration import BuildConfiguration, PipLog, ResolverVersion from pex.resolve.resolvers import ( + Downloaded, + LocalDistribution, ResolvedDistribution, Resolver, ResolveResult, Unsatisfiable, Untranslatable, check_resolve, + fingerprint_path, ) from pex.targets import AbbreviatedPlatform, CompletePlatform, LocalInterpreter, Target, Targets from pex.third_party.packaging.tags import Tag from pex.tracer import TRACER from pex.typing import TYPE_CHECKING -from pex.util import CacheHelper from pex.variables import ENV if TYPE_CHECKING: @@ -280,28 +280,6 @@ class IntegrityError(Exception): pass -def fingerprint_path(path): - # type: (str) -> str - - # We switched from sha1 to sha256 at the transition from using `pip install --target` to - # `pip install --prefix` to serve two purposes: - # 1. Insulate the new installation scheme from the old. - # 2. Move past sha1 which was shown to have practical collision attacks in 2019. - # - # The installation scheme switch was the primary purpose and switching hashes proved a pragmatic - # insulation. If the `pip install --prefix` re-arrangement scheme evolves, then some other - # option than switching hashing algorithms will be needed, like post-fixing a running version - # integer or just mixing one into the hashed content. 
- # - # See: https://github.com/pex-tool/pex/issues/1655 for a general overview of these cache - # structure concerns. - hasher = hashlib.sha256 - - if os.path.isdir(path): - return CacheHelper.dir_hash(path, hasher=hasher) - return CacheHelper.hash(path, hasher=hasher) - - @attr.s(frozen=True) class BuildRequest(object): @classmethod @@ -1271,26 +1249,6 @@ def _download_internal( return local_projects, download_results -@attr.s(frozen=True) -class LocalDistribution(object): - path = attr.ib() # type: str - fingerprint = attr.ib() # type: str - target = attr.ib(factory=targets.current) # type: Target - - @fingerprint.default - def _calculate_fingerprint(self): - return fingerprint_path(self.path) - - @property - def is_wheel(self): - return is_wheel(self.path) and zipfile.is_zipfile(self.path) - - -@attr.s(frozen=True) -class Downloaded(object): - local_distributions = attr.ib() # type: Tuple[LocalDistribution, ...] - - class ResolveObserver(object): @abstractmethod def observe_download( diff --git a/tests/build_system/test_pep_518.py b/tests/build_system/test_pep_518.py index 677b1bc6e..13cf317ad 100644 --- a/tests/build_system/test_pep_518.py +++ b/tests/build_system/test_pep_518.py @@ -5,8 +5,7 @@ import subprocess from textwrap import dedent -from pex.build_system import pep_518 -from pex.build_system.pep_518 import BuildSystem +from pex.build_system import BuildSystem, pep_518 from pex.common import touch from pex.pep_503 import ProjectName from pex.resolve.configured_resolver import ConfiguredResolver diff --git a/tests/integration/build_system/test_pep_518.py b/tests/integration/build_system/test_pep_518.py index fadf61f89..5cdb83188 100644 --- a/tests/integration/build_system/test_pep_518.py +++ b/tests/integration/build_system/test_pep_518.py @@ -4,7 +4,8 @@ import os.path import subprocess -from pex.build_system.pep_518 import BuildSystem, load_build_system +from pex.build_system import BuildSystem +from pex.build_system.pep_518 import load_build_system 
from pex.pip.version import PipVersion from pex.resolve.configured_resolver import ConfiguredResolver from pex.resolve.resolver_configuration import PipConfiguration, ReposConfiguration diff --git a/tests/integration/cli/commands/test_export.py b/tests/integration/cli/commands/test_export.py index 0a5d4423b..c1ab19bc2 100644 --- a/tests/integration/cli/commands/test_export.py +++ b/tests/integration/cli/commands/test_export.py @@ -46,6 +46,7 @@ requires_python=SortedTuple(), target_systems=SortedTuple(), elide_unused_requires_dist=False, + lock_build_systems=False, pip_version=PipVersion.DEFAULT, resolver_version=ResolverVersion.PIP_2020, requirements=SortedTuple([Requirement.parse("ansicolors")]), @@ -87,6 +88,7 @@ ) ] ), + build_systems={}, local_project_requirement_mapping={}, ) diff --git a/tests/integration/cli/commands/test_export_subset.py b/tests/integration/cli/commands/test_export_subset.py index 082fd18ae..02b0ae53c 100644 --- a/tests/integration/cli/commands/test_export_subset.py +++ b/tests/integration/cli/commands/test_export_subset.py @@ -59,7 +59,7 @@ def test_full( if sys.version_info[0] == 2: expected_requirements.append(Requirement.parse(to_unicode("enum34==1.1.10"))) - assert sorted(expected_requirements, key=str) == sorted(actual_requirements, key=str) + assert sorted(expected_requirements) == sorted(actual_requirements) def test_subset( diff --git a/tests/integration/cli/commands/test_lock_dependency_groups.py b/tests/integration/cli/commands/test_lock_dependency_groups.py index 3c414d883..b08c8d031 100644 --- a/tests/integration/cli/commands/test_lock_dependency_groups.py +++ b/tests/integration/cli/commands/test_lock_dependency_groups.py @@ -46,10 +46,7 @@ def test_lock_dependency_groups(tmpdir): ).assert_success() lockfile = json_codec.load(lock) - assert ( - SortedTuple((req("cowsay==5.0"), req("ansicolors==1.1.8")), key=str) - == lockfile.requirements - ) + assert SortedTuple((req("cowsay==5.0"), req("ansicolors==1.1.8"))) == 
lockfile.requirements assert 1 == len(lockfile.locked_resolves) locked_requirements = lockfile.locked_resolves[0].locked_requirements assert sorted( diff --git a/tests/integration/test_locked_resolve.py b/tests/integration/test_locked_resolve.py index 4f33872df..30111b30d 100644 --- a/tests/integration/test_locked_resolve.py +++ b/tests/integration/test_locked_resolve.py @@ -13,7 +13,8 @@ from pex.resolve.lockfile.create import LockObserver from pex.resolve.resolved_requirement import Pin from pex.resolve.resolver_configuration import PipConfiguration -from pex.resolver import Downloaded, LocalDistribution, WheelBuilder +from pex.resolve.resolvers import Downloaded, LocalDistribution +from pex.resolver import WheelBuilder from pex.typing import TYPE_CHECKING from pex.util import CacheHelper from testing.resolve import normalize_locked_resolve diff --git a/tests/resolve/lockfile/test_json_codec.py b/tests/resolve/lockfile/test_json_codec.py index 66eeb03f7..beb830aed 100644 --- a/tests/resolve/lockfile/test_json_codec.py +++ b/tests/resolve/lockfile/test_json_codec.py @@ -41,6 +41,7 @@ def test_roundtrip(tmpdir): style=LockStyle.STRICT, requires_python=(), target_systems=(), + lock_build_systems=False, pip_version=PipVersion.VENDORED, resolver_version=ResolverVersion.PIP_2020, requirements=( diff --git a/tests/resolve/test_locked_resolve.py b/tests/resolve/test_locked_resolve.py index b60a612fa..ccf075805 100644 --- a/tests/resolve/test_locked_resolve.py +++ b/tests/resolve/test_locked_resolve.py @@ -833,6 +833,7 @@ def fingerprint(self, _artifacts): locked_resolve = LockedResolve.create( resolved_requirements=(), dist_metadatas=(), + build_system_oracle=None, fingerprinter=DevNullFingerprinter(), ) assert Resolved(