diff --git a/3rdparty/python/requirements.txt b/3rdparty/python/requirements.txt
index d3198c26204..1345d5da25e 100644
--- a/3rdparty/python/requirements.txt
+++ b/3rdparty/python/requirements.txt
@@ -12,7 +12,7 @@ Markdown==2.1.1
packaging==16.8
parameterized==0.6.1
pathspec==0.5.9
-pex==2.0.3
+pex==1.6.12
psutil==5.6.3
Pygments==2.3.1
pyopenssl==17.3.0
diff --git a/pants.travis-ci.ini b/pants.travis-ci.ini
index 8727005309f..6ee4c68f9bb 100644
--- a/pants.travis-ci.ini
+++ b/pants.travis-ci.ini
@@ -4,22 +4,18 @@
# Turn off all nailgun use.
execution_strategy: subprocess
-# If we use typical default process parallelism tied to core count, we see too many cores under
-# travis and either get oomkilled from launching too many processes with too much total memory
-# overhead or else just generally thrash the container and slow things down.
-travis_parallelism: 4
-
[compile.rsc]
-worker_count: %(travis_parallelism)s
-
-[python-setup]
-resolver_jobs: %(travis_parallelism)s
+# If we use the default of 1 worker per core, we see too many cores under travis
+# and get oomkilled from launching too many workers with too much total memory
+# overhead.
+worker_count: 4
[test.pytest]
# NB: We set a maximum timeout of 9.8 minutes to fail before hitting Travis' 10 minute timeout (which
# doesn't give us useful debug info).
timeout_maximum: 590
+
[test.junit]
# NB: See `test.pytest`.
timeout_maximum: 540
diff --git a/src/python/pants/backend/python/rules/download_pex_bin.py b/src/python/pants/backend/python/rules/download_pex_bin.py
index 5e24437cddf..0caebcb90a4 100644
--- a/src/python/pants/backend/python/rules/download_pex_bin.py
+++ b/src/python/pants/backend/python/rules/download_pex_bin.py
@@ -59,8 +59,8 @@ def create_execute_request(self,
@rule
async def download_pex_bin() -> DownloadedPexBin:
# TODO: Inject versions and digests here through some option, rather than hard-coding it.
- url = 'https://github.com/pantsbuild/pex/releases/download/v2.0.3/pex'
- digest = Digest('183a14145553186ca1c0f2877e5eb3a1d7504501f711bb7b84b281342ffbd5ce', 2427459)
+ url = 'https://github.com/pantsbuild/pex/releases/download/v1.6.12/pex'
+ digest = Digest('ce64cb72cd23d2123dd48126af54ccf2b718d9ecb98c2ed3045ed1802e89e7e1', 1842359)
snapshot = await Get(Snapshot, UrlToFetch(url, digest))
return DownloadedPexBin(executable=snapshot.files[0], directory_digest=snapshot.directory_digest)
diff --git a/src/python/pants/backend/python/rules/pex.py b/src/python/pants/backend/python/rules/pex.py
index a5470b3edca..3382e7e2c9d 100644
--- a/src/python/pants/backend/python/rules/pex.py
+++ b/src/python/pants/backend/python/rules/pex.py
@@ -97,8 +97,6 @@ async def create_pex(
interpreter constraints."""
argv = ["--output-file", request.output_filename]
- if python_setup.resolver_jobs:
- argv.extend(["--jobs", python_setup.resolver_jobs])
if request.entry_point is not None:
argv.extend(["--entry-point", request.entry_point])
argv.extend(request.interpreter_constraints.generate_pex_arg_list())
diff --git a/src/python/pants/backend/python/subsystems/pex_build_util.py b/src/python/pants/backend/python/subsystems/pex_build_util.py
index bc2ce58065e..56620a3cc7f 100644
--- a/src/python/pants/backend/python/subsystems/pex_build_util.py
+++ b/src/python/pants/backend/python/subsystems/pex_build_util.py
@@ -7,8 +7,9 @@
from pathlib import Path
from typing import Callable, Sequence, Set
+from pex.fetcher import Fetcher
from pex.pex_builder import PEXBuilder
-from pex.resolver import resolve_multi
+from pex.resolver import resolve
from pex.util import DistributionHelper
from twitter.common.collections import OrderedSet
@@ -25,6 +26,7 @@
from pants.build_graph.files import Files
from pants.build_graph.target import Target
from pants.subsystem.subsystem import Subsystem
+from pants.util.collections import assert_single_element
from pants.util.contextutil import temporary_file
@@ -175,62 +177,112 @@ def __init__(self,
def add_requirement_libs_from(self, req_libs, platforms=None):
"""Multi-platform dependency resolution for PEX files.
+ :param builder: Dump the requirements into this builder.
+ :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
:param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
+ :param log: Use this logger.
:param platforms: A list of :class:`Platform`s to resolve requirements for.
Defaults to the platforms specified by PythonSetup.
"""
reqs = [req for req_lib in req_libs for req in req_lib.requirements]
self.add_resolved_requirements(reqs, platforms=platforms)
- def resolve_distributions(self, reqs, platforms=None):
- """Multi-platform dependency resolution.
+ class SingleDistExtractionError(Exception): pass
- :param reqs: A list of :class:`PythonRequirement` to resolve.
- :param platforms: A list of platform strings to resolve requirements for.
- Defaults to the platforms specified by PythonSetup.
- :returns: List of :class:`pex.resolver.ResolvedDistribution` instances meeting requirements for
- the given platforms.
+ def extract_single_dist_for_current_platform(self, reqs, dist_key):
+ """Resolve a specific distribution from a set of requirements matching the current platform.
+
+ :param list reqs: A list of :class:`PythonRequirement` to resolve.
+ :param str dist_key: The value of `distribution.key` to match for a `distribution` from the
+ resolved requirements.
+ :return: The single :class:`pkg_resources.Distribution` matching `dist_key`.
+ :raises: :class:`self.SingleDistExtractionError` if no dists or multiple dists matched the given
+ `dist_key`.
"""
+ distributions = self._resolve_distributions_by_platform(reqs, platforms=['current'])
+ try:
+ matched_dist = assert_single_element(list(
+ dist
+ for _, dists in distributions.items()
+ for dist in dists
+ if dist.key == dist_key
+ ))
+ except (StopIteration, ValueError) as e:
+ raise self.SingleDistExtractionError(
+ f"Exactly one dist was expected to match name {dist_key} in requirements {reqs}: {e!r}"
+ )
+ return matched_dist
+
+ def _resolve_distributions_by_platform(self, reqs, platforms):
deduped_reqs = OrderedSet(reqs)
find_links = OrderedSet()
for req in deduped_reqs:
+ self._log.debug(f' Dumping requirement: {req}')
+ self._builder.add_requirement(str(req.requirement))
if req.repository:
find_links.add(req.repository)
- return self._resolve_multi(deduped_reqs, platforms=platforms, find_links=find_links)
+ # Resolve the requirements into distributions.
+ distributions = self._resolve_multi(self._builder.interpreter, deduped_reqs, platforms,
+ find_links)
+ return distributions
def add_resolved_requirements(self, reqs, platforms=None):
"""Multi-platform dependency resolution for PEX files.
- :param reqs: A list of :class:`PythonRequirement`s to resolve.
- :param platforms: A list of platform strings to resolve requirements for.
+ :param builder: Dump the requirements into this builder.
+ :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
+ :param reqs: A list of :class:`PythonRequirement` to resolve.
+ :param log: Use this logger.
+ :param platforms: A list of :class:`Platform`s to resolve requirements for.
Defaults to the platforms specified by PythonSetup.
"""
- for resolved_dist in self.resolve_distributions(reqs, platforms=platforms):
- requirement = resolved_dist.requirement
- self._log.debug(f' Dumping requirement: {requirement}')
- self._builder.add_requirement(str(requirement))
+ distributions = self._resolve_distributions_by_platform(reqs, platforms=platforms)
+ locations = set()
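+    # The same resolved wheel can satisfy more than one requested platform; track locations so
+    # each distribution is dumped into the pex only once.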
+ for platform, dists in distributions.items():
+ for dist in dists:
+ if dist.location not in locations:
+ self._log.debug(f' Dumping distribution: .../{os.path.basename(dist.location)}')
+ self.add_distribution(dist)
+ locations.add(dist.location)
+
+ def _resolve_multi(self, interpreter, requirements, platforms, find_links):
+ """Multi-platform dependency resolution for PEX files.
- distribution = resolved_dist.distribution
- self._log.debug(f' Dumping distribution: .../{os.path.basename(distribution.location)}')
- self.add_distribution(distribution)
+ Returns a list of distributions that must be included in order to satisfy a set of requirements.
+ That may involve distributions for multiple platforms.
- def _resolve_multi(self, requirements, platforms=None, find_links=None):
+ :param interpreter: The :class:`PythonInterpreter` to resolve for.
+ :param requirements: A list of :class:`PythonRequirement` objects to resolve.
+ :param platforms: A list of :class:`Platform`s to resolve for.
+ :param find_links: Additional paths to search for source packages during resolution.
+ :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
+ to satisfy the requirements on that platform.
+ """
python_setup = self._python_setup_subsystem
python_repos = self._python_repos_subsystem
platforms = platforms or python_setup.platforms
- find_links = list(find_links) if find_links else []
- find_links.extend(python_repos.repos)
-
- return resolve_multi(
- requirements=[str(req.requirement) for req in requirements],
- interpreters=[self._builder.interpreter],
- indexes=python_repos.indexes,
- find_links=find_links,
- platforms=platforms,
- cache=python_setup.resolver_cache_dir,
- allow_prereleases=python_setup.resolver_allow_prereleases,
- max_parallel_jobs=python_setup.resolver_jobs)
+ find_links = find_links or []
+ distributions = {}
+    # NB: get_fetchers() is memoized, so copy the list before extending it with per-resolve
+    # find-links fetchers; otherwise they would accumulate on the shared PythonRepos instance.
+    fetchers = list(python_repos.get_fetchers())
+ fetchers.extend(Fetcher([path]) for path in find_links)
+
+ for platform in platforms:
+ requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir,
+ str(interpreter.identity))
+ resolved_dists = resolve(
+ requirements=[str(req.requirement) for req in requirements],
+ interpreter=interpreter,
+ fetchers=fetchers,
+ platform=platform,
+ context=python_repos.get_network_context(),
+ cache=requirements_cache_dir,
+ cache_ttl=python_setup.resolver_cache_ttl,
+ allow_prereleases=python_setup.resolver_allow_prereleases,
+ use_manylinux=python_setup.use_manylinux)
+ distributions[platform] = [resolved_dist.distribution for resolved_dist in resolved_dists]
+
+ return distributions
def add_sources_from(self, tgt: Target) -> None:
dump_source = _create_source_dumper(self._builder, tgt)
diff --git a/src/python/pants/backend/python/subsystems/python_native_code.py b/src/python/pants/backend/python/subsystems/python_native_code.py
index b9309a59ebb..0c391cb2457 100644
--- a/src/python/pants/backend/python/subsystems/python_native_code.py
+++ b/src/python/pants/backend/python/subsystems/python_native_code.py
@@ -9,10 +9,12 @@
from pants.backend.native.subsystems.native_toolchain import NativeToolchain
from pants.backend.native.targets.native_library import NativeLibrary
+from pants.backend.python.python_requirement import PythonRequirement
from pants.backend.python.subsystems import pex_build_util
from pants.backend.python.subsystems.python_setup import PythonSetup
from pants.backend.python.targets.python_distribution import PythonDistribution
from pants.base.exceptions import IncompatiblePlatformsError
+from pants.binaries.executable_pex_tool import ExecutablePexTool
from pants.engine.rules import optionable_rule, rule
from pants.subsystem.subsystem import Subsystem
from pants.util.memo import memoized_property
@@ -123,6 +125,25 @@ def check_build_for_current_platform_only(self, targets):
))
+class BuildSetupRequiresPex(ExecutablePexTool):
+ options_scope = 'build-setup-requires-pex'
+
+ @classmethod
+ def register_options(cls, register):
+ super().register_options(register)
+ register('--setuptools-version', advanced=True, fingerprint=True, default='40.6.3',
+ help='The setuptools version to use when executing `setup.py` scripts.')
+ register('--wheel-version', advanced=True, fingerprint=True, default='0.32.3',
+ help='The wheel version to use when executing `setup.py` scripts.')
+
+ @property
+ def base_requirements(self):
+ return [
+ PythonRequirement('setuptools=={}'.format(self.get_options().setuptools_version)),
+ PythonRequirement('wheel=={}'.format(self.get_options().wheel_version)),
+ ]
+
+
@dataclass(frozen=True)
class PexBuildEnvironment:
cpp_flags: Tuple[str, ...]
diff --git a/src/python/pants/backend/python/subsystems/python_repos.py b/src/python/pants/backend/python/subsystems/python_repos.py
index a379e512613..ddc6754d1c5 100644
--- a/src/python/pants/backend/python/subsystems/python_repos.py
+++ b/src/python/pants/backend/python/subsystems/python_repos.py
@@ -1,7 +1,59 @@
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
+import logging
+
+from pex.fetcher import Fetcher, PyPIFetcher
+from pex.http import RequestsContext, StreamFilelike, requests
+
from pants.subsystem.subsystem import Subsystem
+from pants.util.memo import memoized_method
+
+
+logger = logging.getLogger(__name__)
+
+
+# TODO: These methods of RequestsContext are monkey-patched out to work around
+# https://github.com/pantsbuild/pex/issues/26: we should upstream a fix for this.
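+# A (connect, read) timeout pair in seconds, passed to the requests session calls patched in below.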
+_REQUESTS_TIMEOUTS = (15, 30)
+
+
+def _open_monkey(self, link):
+ # requests does not support file:// -- so we must short-circuit manually
+ if link.local:
+ return open(link.local_path, 'rb') # noqa: T802
+ for attempt in range(self._max_retries + 1):
+ try:
+ return StreamFilelike(self._session.get(
+ link.url, verify=self._verify, stream=True, headers={'User-Agent': self.USER_AGENT},
+ timeout=_REQUESTS_TIMEOUTS),
+ link)
+ except requests.exceptions.ReadTimeout:
+ # Connect timeouts are handled by the HTTPAdapter, unfortunately read timeouts are not
+ # so we'll retry them ourselves.
+      logger.warning('Read timeout trying to fetch %s, retrying. %d retries remain.' % (
+ link.url,
+ self._max_retries - attempt))
+ except requests.exceptions.RequestException as e:
+ raise self.Error(e)
+
+ raise self.Error(
+ requests.packages.urllib3.exceptions.MaxRetryError(
+ None,
+ link,
+ 'Exceeded max retries of %d' % self._max_retries))
+
+
+def _resolve_monkey(self, link):
+ return link.wrap(self._session.head(
+ link.url, verify=self._verify, allow_redirects=True,
+ headers={'User-Agent': self.USER_AGENT},
+ timeout=_REQUESTS_TIMEOUTS,
+ ).url)
+
+
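+# Install the monkey patches so RequestsContext uses the explicit timeouts and read-timeout
+# retry handling above.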
+RequestsContext.open = _open_monkey
+RequestsContext.resolve = _resolve_monkey
class PythonRepos(Subsystem):
@@ -23,3 +75,15 @@ def repos(self):
@property
def indexes(self):
return self.get_options().indexes
+
+ @memoized_method
+ def get_fetchers(self):
+ fetchers = []
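+    # Repo URLs act as find-links locations; index URLs are crawled as PyPI-style package indexes.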
+ fetchers.extend(Fetcher([url]) for url in self.repos)
+ fetchers.extend(PyPIFetcher(url) for url in self.indexes)
+ return fetchers
+
+ @memoized_method
+ def get_network_context(self):
+ # TODO(wickman): Add retry, conn_timeout, threads, etc configuration here.
+ return RequestsContext()
diff --git a/src/python/pants/backend/python/subsystems/python_setup.py b/src/python/pants/backend/python/subsystems/python_setup.py
index a5738ee066b..318cdd8f0e5 100644
--- a/src/python/pants/backend/python/subsystems/python_setup.py
+++ b/src/python/pants/backend/python/subsystems/python_setup.py
@@ -44,6 +44,10 @@ def register_options(cls, register):
register('--resolver-cache-dir', advanced=True, default=None, metavar='<dir>',
help='The parent directory for the requirement resolver cache. '
'If unspecified, a standard path under the workdir is used.')
+    register('--resolver-cache-ttl', advanced=True, type=int, metavar='<seconds>',
+ default=10 * 365 * 86400, # 10 years.
+ help='The time in seconds before we consider re-resolving an open-ended requirement, '
+ 'e.g. "flask>=0.2" if a matching distribution is available on disk.')
register('--resolver-allow-prereleases', advanced=True, type=bool, default=UnsetBool,
fingerprint=True, help='Whether to include pre-releases when resolving requirements.')
register('--artifact-cache-dir', advanced=True, default=None, metavar='<dir>',
@@ -56,8 +60,9 @@ def register_options(cls, register):
'"" (the contents of the PATH env var), '
'"" (paths in the PEX_PYTHON_PATH variable in a pexrc file), '
'"" (all python versions under $(pyenv root)/versions).')
- register('--resolver-jobs', type=int, default=None, advanced=True, fingerprint=True,
- help='The maximum number of concurrent jobs to resolve wheels with.')
+ register('--resolver-use-manylinux', advanced=True, type=bool, default=True, fingerprint=True,
+ help='Whether to consider manylinux wheels when resolving requirements for linux '
+ 'platforms.')
@property
def interpreter_constraints(self):
@@ -100,13 +105,17 @@ def resolver_cache_dir(self):
return (self.get_options().resolver_cache_dir or
os.path.join(self.scratch_dir, 'resolved_requirements'))
+ @property
+ def resolver_cache_ttl(self):
+ return self.get_options().resolver_cache_ttl
+
@property
def resolver_allow_prereleases(self):
return self.get_options().resolver_allow_prereleases
@property
- def resolver_jobs(self):
- return self.get_options().resolver_jobs
+ def use_manylinux(self):
+ return self.get_options().resolver_use_manylinux
@property
def artifact_cache_dir(self):
diff --git a/src/python/pants/backend/python/targets/unpacked_whls.py b/src/python/pants/backend/python/targets/unpacked_whls.py
index f960c6dcd1d..960c25396bd 100644
--- a/src/python/pants/backend/python/targets/unpacked_whls.py
+++ b/src/python/pants/backend/python/targets/unpacked_whls.py
@@ -6,7 +6,6 @@
from twitter.common.collections import maybe_list
from pants.backend.python.targets.import_wheels_mixin import ImportWheelsMixin
-from pants.base.deprecated import deprecated_conditional
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
from pants.build_graph.target import Target
@@ -49,21 +48,15 @@ def __init__(self, module_name, libraries=None, include_patterns=None, exclude_p
:param compatibility: Python interpreter constraints used to create the pex for the requirement
target. If unset, the default interpreter constraints are used. This
argument is unnecessary unless the native code depends on libpython.
- :param bool within_data_subdir: If True, descend into '-.data/' when matching
- `include_patterns`. For python wheels which declare any non-code
- data, this is usually needed to extract that without manually
- specifying the relative path, including the package version. For
- example, when `data_files` is used in a setup.py,
- `within_data_subdir=True` will allow specifying
- `include_patterns` matching exactly what is specified in the
- setup.py.
+ :param str within_data_subdir: If provided, descend into '-.data/' when
+ matching `include_patterns`. For python wheels which declare any
+ non-code data, this is usually needed to extract that without
+ manually specifying the relative path, including the package
+ version. For example, when `data_files` is used in a setup.py,
+ `within_data_subdir='data'` will allow specifying
+ `include_patterns` matching exactly what is specified in the
+ setup.py.
"""
- deprecated_conditional(
- lambda: type(within_data_subdir) not in (bool, type(None)),
- removal_version='1.26.0.dev2',
- entity_description='A non-boolean value for `within_data_subdir`',
- hint_message='The location of the .data subdirectory will be inferred from the module name!',
- )
payload = payload or Payload()
payload.add_fields({
'library_specs': PrimitiveField(libraries or ()),
diff --git a/src/python/pants/backend/python/tasks/BUILD b/src/python/pants/backend/python/tasks/BUILD
index b4e497a5b2a..7cf50ba7a01 100644
--- a/src/python/pants/backend/python/tasks/BUILD
+++ b/src/python/pants/backend/python/tasks/BUILD
@@ -27,7 +27,6 @@ python_library(
'src/python/pants/engine:rules',
'src/python/pants/engine:selectors',
'src/python/pants/invalidation',
- 'src/python/pants/python',
'src/python/pants/task',
'src/python/pants/util:contextutil',
'src/python/pants/util:dirutil',
diff --git a/src/python/pants/backend/python/tasks/build_local_python_distributions.py b/src/python/pants/backend/python/tasks/build_local_python_distributions.py
index 0dc7e839e02..fbfc0b3a4eb 100644
--- a/src/python/pants/backend/python/tasks/build_local_python_distributions.py
+++ b/src/python/pants/backend/python/tasks/build_local_python_distributions.py
@@ -5,7 +5,6 @@
import os
import re
import shutil
-from pathlib import Path
from pex import pep425tags
from pex.interpreter import PythonInterpreter
@@ -14,15 +13,18 @@
from pants.backend.native.tasks.link_shared_libraries import SharedLibrary
from pants.backend.python.python_requirement import PythonRequirement
from pants.backend.python.subsystems.pex_build_util import is_local_python_dist
-from pants.backend.python.subsystems.python_native_code import PythonNativeCode
+from pants.backend.python.subsystems.python_native_code import (
+ BuildSetupRequiresPex,
+ PythonNativeCode,
+)
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.address import Address
-from pants.python.setup_py_runner import SetupPyRunner
from pants.task.task import Task
from pants.util.collections import assert_single_element
+from pants.util.contextutil import pushd
from pants.util.dirutil import safe_mkdir_for, split_basename_and_dirname
from pants.util.memo import memoized_property
from pants.util.strutil import safe_shlex_join
@@ -65,7 +67,7 @@ def implementation_version(cls):
@classmethod
def subsystem_dependencies(cls):
return super().subsystem_dependencies() + (
- SetupPyRunner.Factory.scoped(cls),
+ BuildSetupRequiresPex.scoped(cls),
PythonNativeCode.scoped(cls),
)
@@ -75,13 +77,9 @@ class BuildLocalPythonDistributionsError(TaskError): pass
def _python_native_code_settings(self):
return PythonNativeCode.scoped_instance(self)
- def _build_setup_py_runner(self, extra_reqs=None, interpreter=None, pex_file_path=None):
- return SetupPyRunner.Factory.create(
- scope=self,
- extra_reqs=extra_reqs,
- interpreter=interpreter,
- pex_file_path=pex_file_path
- )
+ @memoized_property
+ def _build_setup_requires_pex_settings(self):
+ return BuildSetupRequiresPex.scoped_instance(self)
# TODO: This should probably be made into an @classproperty (see PR #5901).
@property
@@ -211,23 +209,24 @@ def _prepare_and_create_dist(self, interpreter, shared_libs_product, versioned_t
'Installing setup requirements: {}\n\n'
.format([req.key for req in setup_reqs_to_resolve]))
- pex_file_path = os.path.join(
+ setup_reqs_pex_path = os.path.join(
setup_requires_dir,
- f'setup-py-runner-{versioned_target_fingerprint}.pex')
- setup_py_runner = self._build_setup_py_runner(
- interpreter=interpreter, extra_reqs=setup_reqs_to_resolve, pex_file_path=pex_file_path)
- self.context.log.debug(f'Using pex file as setup.py interpreter: {setup_py_runner}')
+ f'setup-requires-{versioned_target_fingerprint}.pex')
+ setup_requires_pex = self._build_setup_requires_pex_settings.bootstrap(
+ interpreter, setup_reqs_pex_path, extra_reqs=setup_reqs_to_resolve)
+ self.context.log.debug('Using pex file as setup.py interpreter: {}'
+ .format(setup_requires_pex.path()))
self._create_dist(
dist_target,
dist_output_dir,
- setup_py_runner,
+ setup_requires_pex,
versioned_target_fingerprint,
is_platform_specific)
# NB: "snapshot" refers to a "snapshot release", not a Snapshot.
def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_specific):
- """Create a command line to generate a wheel via `setup.py`.
+ """Create a command line to pass to :class:`SetupPyRunner`.
Note that distutils will convert `snapshot_fingerprint` into a string suitable for a version
tag. Currently for versioned target fingerprints, this seems to convert all punctuation into
@@ -245,7 +244,8 @@ def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_
dist_dir_args = ['--dist-dir', self._DIST_OUTPUT_DIR]
- return (egg_info_snapshot_tag_args +
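+    # The setup-requires pex is built without an entry point, so it runs like a bare python
+    # interpreter; 'setup.py' is therefore the script it executes from the dist directory the
+    # caller pushds into.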
+ return (['setup.py'] +
+ egg_info_snapshot_tag_args +
bdist_whl_args +
platform_args +
dist_dir_args)
@@ -253,7 +253,7 @@ def _generate_snapshot_bdist_wheel_argv(self, snapshot_fingerprint, is_platform_
def _create_dist(self,
dist_tgt,
dist_target_dir,
- setup_py_runner,
+ setup_requires_pex,
snapshot_fingerprint,
is_platform_specific):
"""Create a .whl file for the specified python_distribution target."""
@@ -262,17 +262,24 @@ def _create_dist(self,
setup_py_snapshot_version_argv = self._generate_snapshot_bdist_wheel_argv(
snapshot_fingerprint, is_platform_specific)
- cmd = safe_shlex_join(setup_py_runner.cmdline(setup_py_snapshot_version_argv))
+ cmd = safe_shlex_join(setup_requires_pex.cmdline(setup_py_snapshot_version_argv))
with self.context.new_workunit('setup.py', cmd=cmd, labels=[WorkUnitLabel.TOOL]) as workunit:
- try:
- setup_py_runner.run_setup_command(source_dir=Path(dist_target_dir),
- setup_command=setup_py_snapshot_version_argv,
- stdout=workunit.output('stdout'),
- stderr=workunit.output('stderr'))
- except SetupPyRunner.CommandFailure as e:
- raise self.BuildLocalPythonDistributionsError(
- f"Installation of python distribution from target {dist_tgt} into directory "
- f"{dist_target_dir} failed using the host system's compiler and linker: {e}")
+ with pushd(dist_target_dir):
+ result = setup_requires_pex.run(args=setup_py_snapshot_version_argv,
+ stdout=workunit.output('stdout'),
+ stderr=workunit.output('stderr'))
+ if result != 0:
+ raise self.BuildLocalPythonDistributionsError(
+ "Installation of python distribution from target {target} into directory {into_dir} "
+ "failed (return value of run() was: {rc!r}).\n"
+ "The pex with any requirements is located at: {interpreter}.\n"
+ "The host system's compiler and linker were used.\n"
+ "The setup command was: {command}."
+ .format(target=dist_tgt,
+ into_dir=dist_target_dir,
+ rc=result,
+ interpreter=setup_requires_pex.path(),
+ command=setup_py_snapshot_version_argv))
# TODO: convert this into a SimpleCodegenTask, which does the exact same thing as this method!
def _inject_synthetic_dist_requirements(self, dist, req_lib_addr):
diff --git a/src/python/pants/backend/python/tasks/setup_py.py b/src/python/pants/backend/python/tasks/setup_py.py
index b667384288e..4636ff251dc 100644
--- a/src/python/pants/backend/python/tasks/setup_py.py
+++ b/src/python/pants/backend/python/tasks/setup_py.py
@@ -11,9 +11,8 @@
from abc import ABC, abstractmethod
from collections import OrderedDict, defaultdict
from collections.abc import Iterable, Mapping, MutableSequence, Set
-from pathlib import Path
-from typing import Dict
+from pex.installer import Packager, WheelInstaller
from pex.interpreter import PythonInterpreter
from pex.pex import PEX
from pex.pex_builder import PEXBuilder
@@ -32,7 +31,6 @@
from pants.build_graph.address_lookup_error import AddressLookupError
from pants.build_graph.build_graph import sort_targets
from pants.build_graph.resources import Resources
-from pants.python.setup_py_runner import SetupPyRunner
from pants.task.task import Task
from pants.util.contextutil import temporary_file
from pants.util.dirutil import safe_concurrent_creation, safe_rmtree, safe_walk
@@ -108,6 +106,17 @@ def _write_repr(o, indent=False, level=0):
return output.getvalue()
+class SetupPyRunner(WheelInstaller):
+ # We extend WheelInstaller to make sure `setuptools` and `wheel` are available to setup.py.
+
+ def __init__(self, source_dir, setup_command, **kw):
+ self._setup_command = setup_command
+ super().__init__(source_dir, **kw)
+
+ def setup_command(self):
+ return self._setup_command
+
+
class TargetAncestorIterator:
"""Supports iteration of target ancestor lineages."""
@@ -395,10 +404,6 @@ def dependencies(self, target):
for binary in target.provided_binaries.values():
yield binary
- @classmethod
- def subsystem_dependencies(cls):
- return super().subsystem_dependencies() + (SetupPyRunner.Factory.scoped(cls),)
-
@classmethod
def prepare(cls, options, round_manager):
round_manager.require_data(PythonInterpreter)
@@ -669,14 +674,14 @@ def is_exported_python_target(t):
# phase to ensure an exported target is, for example (--run="sdist upload"), uploaded before any
# exported target that depends on it is uploaded.
- created: Dict[PythonTarget, Path] = {}
+ created = {}
def create(exported_python_target):
if exported_python_target not in created:
self.context.log.info('Creating setup.py project for {}'.format(exported_python_target))
subject = self.derived_by_original.get(exported_python_target, exported_python_target)
setup_dir, dependencies = self.create_setup_py(subject, dist_dir)
- created[exported_python_target] = Path(setup_dir)
+ created[exported_python_target] = setup_dir
if self._recursive:
for dep in dependencies:
if is_exported_python_target(dep):
@@ -687,29 +692,23 @@ def create(exported_python_target):
interpreter = self.context.products.get_data(PythonInterpreter)
python_dists = self.context.products.register_data(self.PYTHON_DISTS_PRODUCT, {})
-
- setup_runner = SetupPyRunner.Factory.create(
- scope=self,
- interpreter=interpreter,
- pex_file_path=os.path.join(self.workdir, self.fingerprint, 'setup-py-runner.pex')
- )
for exported_python_target in reversed(sort_targets(list(created.keys()))):
setup_dir = created.get(exported_python_target)
if setup_dir:
if not self._run:
- self.context.log.info('Running sdist against {}'.format(setup_dir))
- sdist = setup_runner.sdist(setup_dir)
- tgz_name = sdist.name
+ self.context.log.info('Running packager against {}'.format(setup_dir))
+ setup_runner = Packager(setup_dir, interpreter=interpreter)
+ tgz_name = os.path.basename(setup_runner.sdist())
sdist_path = os.path.join(dist_dir, tgz_name)
self.context.log.info('Writing {}'.format(sdist_path))
- shutil.move(sdist, sdist_path)
- safe_rmtree(str(setup_dir))
+ shutil.move(setup_runner.sdist(), sdist_path)
+ safe_rmtree(setup_dir)
python_dists[exported_python_target] = sdist_path
else:
self.context.log.info('Running {} against {}'.format(self._run, setup_dir))
split_command = safe_shlex_split(self._run)
- try:
- setup_runner.run_setup_command(source_dir=setup_dir, setup_command=split_command)
- except SetupPyRunner.CommandFailure as e:
- raise TaskError(f'Install failed: {e}')
+ setup_runner = SetupPyRunner(setup_dir, split_command, interpreter=interpreter)
+ installed = setup_runner.run()
+ if not installed:
+ raise TaskError('Install failed.')
python_dists[exported_python_target] = setup_dir
diff --git a/src/python/pants/backend/python/tasks/unpack_wheels.py b/src/python/pants/backend/python/tasks/unpack_wheels.py
index 2ff2a6058d6..a85a7ec6d98 100644
--- a/src/python/pants/backend/python/tasks/unpack_wheels.py
+++ b/src/python/pants/backend/python/tasks/unpack_wheels.py
@@ -12,6 +12,7 @@
from pants.backend.python.targets.unpacked_whls import UnpackedWheels
from pants.base.exceptions import TaskError
from pants.base.fingerprint_strategy import DefaultFingerprintHashingMixin, FingerprintStrategy
+from pants.fs.archive import ZIP
from pants.task.unpack_remote_sources_base import UnpackRemoteSourcesBase
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import mergetree, safe_concurrent_creation
@@ -50,30 +51,16 @@ def subsystem_dependencies(cls):
PythonSetup,
)
- class SingleDistExtractionError(Exception): pass
+ class _NativeCodeExtractionSetupFailure(Exception): pass
def _get_matching_wheel(self, pex_path, interpreter, requirements, module_name):
- """Use PexBuilderWrapper to resolve a single wheel from the requirement specs using pex.
-
- N.B.: The resolved wheel is already "unpacked" by PEX. More accurately, it's installed in a
- chroot.
- """
+ """Use PexBuilderWrapper to resolve a single wheel from the requirement specs using pex."""
with self.context.new_workunit('extract-native-wheels'):
with safe_concurrent_creation(pex_path) as chroot:
pex_builder = PexBuilderWrapper.Factory.create(
builder=PEXBuilder(path=chroot, interpreter=interpreter),
log=self.context.log)
-
- resolved_dists = pex_builder.resolve_distributions(requirements, platforms=['current'])
-
- matched_dists = [resolved_dist.distribution for resolved_dist in resolved_dists
- if resolved_dist.distribution.key == module_name]
- if len(matched_dists) != 1:
- raise self.SingleDistExtractionError(
- f"Exactly one dist was expected to match name {module_name} in requirements "
- f"{requirements}, found {matched_dists}"
- )
- return matched_dists[0]
+ return pex_builder.extract_single_dist_for_current_platform(requirements, module_name)
@memoized_method
def _compatible_interpreter(self, unpacked_whls):
@@ -86,18 +73,22 @@ class WheelUnpackingError(TaskError): pass
def unpack_target(self, unpacked_whls, unpack_dir):
interpreter = self._compatible_interpreter(unpacked_whls)
- with temporary_dir() as resolve_dir:
+ with temporary_dir() as resolve_dir,\
+ temporary_dir() as extract_dir:
try:
matched_dist = self._get_matching_wheel(resolve_dir, interpreter,
unpacked_whls.all_imported_requirements,
unpacked_whls.module_name)
- wheel_chroot = matched_dist.location
+ ZIP.extract(matched_dist.location, extract_dir)
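+        # Wheel archives store data_files under '<name>-<version>.data/<subdir>/' (PEP 427),
+        # which is the prefix reconstructed below.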
if unpacked_whls.within_data_subdir:
- # N.B.: Wheels with data dirs have the data installed under the top module.
- dist_data_dir = os.path.join(wheel_chroot, unpacked_whls.module_name)
+ data_dir_prefix = '{name}-{version}.data/{subdir}'.format(
+ name=matched_dist.project_name,
+ version=matched_dist.version,
+ subdir=unpacked_whls.within_data_subdir,
+ )
+ dist_data_dir = os.path.join(extract_dir, data_dir_prefix)
else:
- dist_data_dir = wheel_chroot
-
+ dist_data_dir = extract_dir
unpack_filter = self.get_unpack_filter(unpacked_whls)
# Copy over the module's data files into `unpack_dir`.
mergetree(dist_data_dir, unpack_dir, file_filter=unpack_filter)
diff --git a/src/python/pants/init/plugin_resolver.py b/src/python/pants/init/plugin_resolver.py
index 2f0cfd404f9..e39b5f07ef8 100644
--- a/src/python/pants/init/plugin_resolver.py
+++ b/src/python/pants/init/plugin_resolver.py
@@ -10,10 +10,12 @@
from pex import resolver
from pex.interpreter import PythonInterpreter
from pkg_resources import working_set as global_working_set
+from wheel.install import WheelFile
from pants.backend.python.subsystems.python_repos import PythonRepos
from pants.option.global_options import GlobalOptionsRegistrar
-from pants.util.dirutil import safe_delete, safe_open
+from pants.util.contextutil import temporary_dir
+from pants.util.dirutil import safe_delete, safe_mkdir, safe_open
from pants.util.memo import memoized_property
from pants.util.strutil import ensure_text
from pants.version import PANTS_SEMVER
@@ -27,6 +29,29 @@ class PluginResolver:
def _is_wheel(path):
return os.path.isfile(path) and path.endswith('.whl')
+ @classmethod
+ def _activate_wheel(cls, wheel_path):
+ install_dir = '{}-install'.format(wheel_path)
+ if not os.path.isdir(install_dir):
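+      # Install into a temp dir next to install_dir, then rename it into place so a partially
+      # installed wheel is never visible under install_dir.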
+ with temporary_dir(root_dir=os.path.dirname(install_dir)) as tmp:
+ cls._install_wheel(wheel_path, tmp)
+ os.rename(tmp, install_dir)
+ # Activate any .pth files installed above.
+ site.addsitedir(install_dir)
+ return install_dir
+
+ @classmethod
+ def _install_wheel(cls, wheel_path, install_dir):
+ safe_mkdir(install_dir, clean=True)
+ WheelFile(wheel_path).install(force=True,
+ overrides={
+ 'purelib': install_dir,
+ 'headers': os.path.join(install_dir, 'headers'),
+ 'scripts': os.path.join(install_dir, 'bin'),
+ 'platlib': install_dir,
+ 'data': install_dir
+ })
+
def __init__(self, options_bootstrapper, *, interpreter=None):
self._options_bootstrapper = options_bootstrapper
self._interpreter = interpreter or PythonInterpreter.get()
@@ -45,9 +70,10 @@ def resolve(self, working_set=None):
"""
working_set = working_set or global_working_set
if self._plugin_requirements:
- for resolved_plugin_location in self._resolve_plugin_locations():
- site.addsitedir(resolved_plugin_location) # Activate any .pth files plugin wheels may have.
- working_set.add_entry(resolved_plugin_location)
+ for plugin_location in self._resolve_plugin_locations():
+ if self._is_wheel(plugin_location):
+ plugin_location = self._activate_wheel(plugin_location)
+ working_set.add_entry(plugin_location)
return working_set
def _resolve_plugin_locations(self):
@@ -79,11 +105,16 @@ def _resolve_plugin_locations(self):
def _resolve_plugins(self):
logger.info('Resolving new plugins...:\n {}'.format('\n '.join(self._plugin_requirements)))
resolved_dists = resolver.resolve(self._plugin_requirements,
- indexes=self._python_repos.indexes,
- find_links=self._python_repos.repos,
+ fetchers=self._python_repos.get_fetchers(),
interpreter=self._interpreter,
+ context=self._python_repos.get_network_context(),
cache=self.plugin_cache_dir,
- allow_prereleases=PANTS_SEMVER.is_prerelease)
+ # Effectively never expire.
+ cache_ttl=10 * 365 * 24 * 60 * 60,
+ allow_prereleases=PANTS_SEMVER.is_prerelease,
+ # Plugins will all depend on `pantsbuild.pants` which is
+ # distributed as a manylinux wheel.
+ use_manylinux=True)
return [resolved_dist.distribution for resolved_dist in resolved_dists]
@property
diff --git a/src/python/pants/python/BUILD b/src/python/pants/python/BUILD
deleted file mode 100644
index 6006f606571..00000000000
--- a/src/python/pants/python/BUILD
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
-# Licensed under the Apache License, Version 2.0 (see LICENSE).
-
-python_library(
- dependencies = [
- '3rdparty/python:pex',
- 'src/python/pants/util:dirutil',
- 'src/python/pants/util:memo',
- ]
-)
diff --git a/src/python/pants/python/__init__.py b/src/python/pants/python/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/src/python/pants/python/setup_py_runner.py b/src/python/pants/python/setup_py_runner.py
deleted file mode 100644
index 33af87e1454..00000000000
--- a/src/python/pants/python/setup_py_runner.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
-# Licensed under the Apache License, Version 2.0 (see LICENSE).
-
-import os
-from pathlib import Path
-from typing import Callable, Iterable, List, Optional
-
-from pex.interpreter import PythonInterpreter
-from pex.pex import PEX
-from pex.pex_info import PexInfo
-
-from pants.backend.python.python_requirement import PythonRequirement
-from pants.binaries.executable_pex_tool import ExecutablePexTool
-from pants.option.optionable import Optionable
-from pants.util.contextutil import pushd
-from pants.util.dirutil import safe_mkdtemp
-from pants.util.memo import memoized_method
-from pants.util.strutil import safe_shlex_join
-
-
-class SetupPyRunner:
- """A utility capable of executing setup.py commands in a hermetic environment.
-
- Supports `setuptools` and `wheel` distutils commands by default.
- """
-
- class Factory(ExecutablePexTool):
- options_scope = 'setup-py-runner'
- deprecated_options_scope = 'build-setup-requires-pex'
- deprecated_options_scope_removal_version = '1.25.0.dev2'
-
- @classmethod
- def register_options(cls, register: Callable[..., None]) -> None:
- super().register_options(register)
- register('--setuptools-version', advanced=True, fingerprint=True, default='41.6.0',
- help='The setuptools version to use when executing `setup.py` scripts.')
- register('--wheel-version', advanced=True, fingerprint=True, default='0.33.6',
- help='The wheel version to use when executing `setup.py` scripts.')
-
- @classmethod
- def create(
- cls,
- *,
- pex_file_path: Optional[Path] = None,
- extra_reqs: Optional[List[PythonRequirement]] = None,
- interpreter: Optional[PythonInterpreter] = None,
- scope: Optional[Optionable] = None,
- ) -> 'SetupPyRunner':
- factory = cls.scoped_instance(scope) if scope is not None else cls.global_instance()
- requirements_pex = factory.bootstrap(
- interpreter=interpreter,
- pex_file_path=pex_file_path or os.path.join(safe_mkdtemp(), 'setup-py-runner.pex'),
- extra_reqs=extra_reqs
- )
- return SetupPyRunner(requirements_pex=requirements_pex)
-
- @property
- def base_requirements(self):
- return [
- PythonRequirement(f'setuptools=={self.get_options().setuptools_version}'),
- PythonRequirement(f'wheel=={self.get_options().wheel_version}'),
- ]
-
- class CommandFailure(Exception):
- """Indicates an error executing setup.py commands."""
-
- def __init__(self, requirements_pex: PEX) -> None:
- self._requirements_pex = requirements_pex
-
- @memoized_method
- def __str__(self) -> str:
- pex_path = self._requirements_pex.path()
- pex_info = PexInfo.from_pex(pex_path)
- requirements = "\n ".join(map(str, pex_info.requirements))
- return f'{type(self).__name__} at {pex_path} with requirements:\n {requirements} '
-
- def _create_python_args(self, setup_command: Iterable[str]) -> Iterable[str]:
- args = ['setup.py', '--no-user-cfg']
- args.extend(setup_command)
- return args
-
- def cmdline(self, setup_command: Iterable[str]) -> Iterable[str]:
- """Returns the command line that would be used to execute the given setup.py command."""
- return self._requirements_pex.cmdline(self._create_python_args(setup_command))
-
- def run_setup_command(self, *, source_dir: Path, setup_command: Iterable[str], **kwargs) -> None:
- """Runs the given setup.py command against the setup.py project in `source_dir`.
-
- :raises: :class:`SetupPyRunner.CommandFailure` if there was a problem executing the command.
- """
- with pushd(str(source_dir)):
- result = self._requirements_pex.run(args=self._create_python_args(setup_command), **kwargs)
- if result != 0:
- pex_command = safe_shlex_join(self.cmdline(setup_command))
- raise self.CommandFailure(f'Failed to execute {pex_command} using {self}')
-
- def _collect_distribution(
- self,
- source_dir: Path,
- setup_command: Iterable[str],
- dist_dir: Path
- ) -> Path:
-
- assert source_dir.is_dir()
- self._source_dir = source_dir
-
- self.run_setup_command(
- source_dir=source_dir,
- setup_command=setup_command
- )
-
- dists = os.listdir(dist_dir)
- if len(dists) == 0:
- raise self.CommandFailure('No distribution was produced!')
- if len(dists) > 1:
- ambiguous_dists = "\n ".join(dists)
- raise self.CommandFailure(f'Ambiguous distributions found:\n {ambiguous_dists}')
-
- return dist_dir.joinpath(dists[0])
-
- @memoized_method
- def sdist(self, source_dir: Path) -> Path:
- """Generates an sdist from the setup.py project at `source_dir` and returns the sdist path."""
- dist_dir = safe_mkdtemp()
- return self._collect_distribution(
- source_dir=source_dir,
- setup_command=['sdist', '--dist-dir', dist_dir],
- dist_dir=Path(dist_dir)
- )
-
- @memoized_method
- def bdist(self, source_dir: Path) -> Path:
- """Generates a wheel from the setup.py project at `source_dir` and returns the wheel path."""
- dist_dir = safe_mkdtemp()
- return self._collect_distribution(
- source_dir=source_dir,
- setup_command=['bdist_wheel', '--dist-dir', dist_dir],
- dist_dir=Path(dist_dir)
- )
diff --git a/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py b/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py
index 7defcdec993..04ea672809f 100644
--- a/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py
+++ b/tests/python/pants_test/backend/codegen/thrift/python/test_apache_thrift_py_gen.py
@@ -176,8 +176,8 @@ def test_namespace_effective(self):
for resolved_dist in resolve([f'thrift=={self.get_thrift_version(apache_thrift_gen)}',
'setuptools==40.6.3'],
interpreter=interpreter,
- indexes=python_repos.indexes,
- find_links=python_repos.repos):
+ context=python_repos.get_network_context(),
+ fetchers=python_repos.get_fetchers()):
pythonpath.append(resolved_dist.distribution.location)
process = subprocess.Popen([interpreter.binary,
diff --git a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py
index 4ca1f56acd2..da714892289 100644
--- a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py
+++ b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py
@@ -207,11 +207,6 @@ def test_pants_native_source_detection_for_local_ctypes_dists_for_current_platfo
(2) a different platform than the one we are currently running on. The python_binary() target
below is declared with `platforms="current"`.
"""
-
- # The implementation abbreviation of 'dne' (does not exist), is ~guaranteed not to match our
- # current platform while still providing an overall valid platform identifier string.
- foreign_platform = 'macosx-10.5-x86_64-dne-37-m'
-
command = [
'run',
'testprojects/src/python/python_distribution/ctypes:bin'
@@ -223,7 +218,7 @@ def test_pants_native_source_detection_for_local_ctypes_dists_for_current_platfo
'toolchain_variant': 'llvm',
},
'python-setup': {
- 'platforms': ['current', foreign_platform]
+ 'platforms': ['current', 'this_platform_does_not_exist']
},
})
self.assert_success(pants_run)
diff --git a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py
index a7b503c3164..c3b6a93a18b 100644
--- a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py
+++ b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py
@@ -1,10 +1,14 @@
# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
+import re
from collections import OrderedDict
+import pex.resolver
+
from pants.backend.python.python_requirement import PythonRequirement
from pants.backend.python.targets.python_distribution import PythonDistribution
+from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants_test.backend.python.tasks.util.build_local_dists_test_base import (
BuildLocalPythonDistributionsTestBase,
@@ -81,6 +85,15 @@ class TestBuildLocalPythonDistributions(BuildLocalPythonDistributionsTestBase):
""",
},
}),
+
+ ('src/python/install_requires:conflict', {
+ 'key': 'install_requires_conflict',
+ 'target_type': PythonLibrary,
+ 'dependencies': [
+ '3rdparty/python:pycountry',
+ 'src/python/install_requires:install_requires',
+ ],
+ }),
])
def test_create_distribution(self):
@@ -110,3 +123,15 @@ def test_install_requires(self):
expected_platform=self.ExpectedPlatformType.any,
dist_target=install_requires_dist,
)
+
+ def test_install_requires_conflict(self):
+ install_requires_dist = self.target_dict['install_requires']
+ pycountry_req_lib = self.target_dict['pycountry']
+ conflicting_lib = self.target_dict['install_requires_conflict']
+
+ with self.assertRaisesRegexp(
+ pex.resolver.Unsatisfiable,
+ re.escape('Could not satisfy all requirements for pycountry==18.5.20:')):
+ self._create_distribution_synthetic_target(
+ install_requires_dist,
+ extra_targets=[pycountry_req_lib, conflicting_lib])
diff --git a/tests/python/pants_test/backend/python/tasks/test_pytest_run.py b/tests/python/pants_test/backend/python/tasks/test_pytest_run.py
index b0fd842434d..71da6193113 100644
--- a/tests/python/pants_test/backend/python/tasks/test_pytest_run.py
+++ b/tests/python/pants_test/backend/python/tasks/test_pytest_run.py
@@ -2,13 +2,13 @@
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import configparser
+import functools
import os
from contextlib import contextmanager
from textwrap import dedent
import coverage
-from pants.backend.python.subsystems.python_setup import PythonSetup
from pants.backend.python.targets.python_tests import PythonTests
from pants.backend.python.tasks.gather_sources import GatherSources
from pants.backend.python.tasks.pytest_prep import PytestPrep
@@ -52,17 +52,6 @@ def task_type(cls):
PytestPrepCoverageVersionPinned,
]
- @classmethod
- def setUpClass(cls):
- super().setUpClass()
- cls.resolver_cache_dir = safe_mkdtemp()
-
- def set_other_options(self):
- self.set_options_for_scope(
- PythonSetup.options_scope,
- resolver_cache_dir=self.resolver_cache_dir,
- )
-
_CONFTEST_CONTENT = '# I am an existing root-level conftest file.'
_default_test_options = {
@@ -78,7 +67,6 @@ def _augment_options(self, options):
def run_tests(self, targets, *passthru_args, **options):
"""Run the tests in the specified targets, with the specified PytestRun task options."""
self.set_options(**self._augment_options(options))
- self.set_other_options()
with pushd(self.build_root):
result = self.invoke_tasks(
target_roots=targets,
@@ -88,7 +76,6 @@ def run_tests(self, targets, *passthru_args, **options):
def run_failing_tests(self, targets, failed_targets, *passthru_args, **options):
self.set_options(**self._augment_options(options))
- self.set_other_options()
with self.assertRaises(ErrorWhileTesting) as cm:
with pushd(self.build_root):
self.invoke_tasks(
@@ -892,6 +879,100 @@ def test_sharding_invalid_shard_bad_format(self):
with self.assertRaises(PytestRun.InvalidShardSpecification):
self.run_tests(targets=[self.green], test_shard='1/a')
+ @contextmanager
+ def marking_tests(self):
+ init_subsystem(Target.Arguments)
+ init_subsystem(SourceRootConfig)
+
+ with temporary_dir() as marker_dir:
+ self.create_file(
+ 'test/python/passthru/test_passthru.py',
+ dedent("""
+ import inspect
+ import os
+ import pytest
+ import unittest
+
+
+ class PassthruTest(unittest.TestCase):
+ def touch(self, path):
+ with open(path, 'wb') as fp:
+ fp.close()
+
+ def mark_test_run(self):
+ caller_frame_record = inspect.stack()[1]
+
+ # For the slot breakdown of a frame record tuple, see:
+ # https://docs.python.org/2/library/inspect.html#the-interpreter-stack
+ _, _, _, caller_func_name, _, _ = caller_frame_record
+
+ marker_file = os.path.join({marker_dir!r}, caller_func_name)
+ self.touch(marker_file)
+
+ def test_one(self):
+ self.mark_test_run()
+
+ @pytest.mark.purple
+ def test_two(self):
+ self.mark_test_run()
+
+ def test_three(self):
+ self.mark_test_run()
+
+ @pytest.mark.red
+ def test_four(self):
+ self.mark_test_run()
+
+ @pytest.mark.green
+ def test_five(self):
+ self.mark_test_run()
+ """.format(marker_dir=marker_dir)))
+
+ def assert_mark(exists, name):
+ message = f"{('Expected' if exists else 'Did not expect')} {name!r} to be executed."
+ marker_file = os.path.join(marker_dir, name)
+ self.assertEqual(exists, os.path.exists(marker_file), message)
+
+ self.add_to_build_file('test/python/passthru', 'python_tests()')
+ test = self.target('test/python/passthru')
+ yield test, functools.partial(assert_mark, True), functools.partial(assert_mark, False)
+
+ def test_passthrough_args_facility_single_style(self):
+ with self.marking_tests() as (target, assert_test_run, assert_test_not_run):
+ self.run_tests([target], '-ktest_one or test_two')
+ assert_test_run('test_one')
+ assert_test_run('test_two')
+ assert_test_not_run('test_three')
+ assert_test_not_run('test_four')
+ assert_test_not_run('test_five')
+
+ def test_passthrough_args_facility_plus_arg_style(self):
+ with self.marking_tests() as (target, assert_test_run, assert_test_not_run):
+ self.run_tests([target], '-m', 'purple or red')
+ assert_test_not_run('test_one')
+ assert_test_run('test_two')
+ assert_test_not_run('test_three')
+ assert_test_run('test_four')
+ assert_test_not_run('test_five')
+
+ def test_passthrough_added_after_options(self):
+ with self.marking_tests() as (target, assert_test_run, assert_test_not_run):
+ self.run_tests([target], '-m', 'purple or red', '-k', 'two')
+ assert_test_not_run('test_one')
+ assert_test_run('test_two')
+ assert_test_not_run('test_three')
+ assert_test_not_run('test_four')
+ assert_test_not_run('test_five')
+
+ def test_options_shlexed(self):
+ with self.marking_tests() as (target, assert_test_run, assert_test_not_run):
+ self.run_tests([target], "-m", "purple or red")
+ assert_test_not_run('test_one')
+ assert_test_run('test_two')
+ assert_test_not_run('test_three')
+ assert_test_run('test_four')
+ assert_test_not_run('test_five')
+
@contextmanager
def run_with_junit_xml_dir(self, targets):
with temporary_dir() as dist:
diff --git a/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py b/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py
index c84e4472160..07af51390c4 100644
--- a/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py
+++ b/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py
@@ -12,9 +12,9 @@
from pants.util.contextutil import open_zip, temporary_dir
-_LINUX_PLATFORM = "manylinux1-x86_64-cp-36-m"
+_LINUX_PLATFORM = "linux-x86_64"
_LINUX_WHEEL_SUBSTRING = "manylinux"
-_OSX_PLATFORM = "macosx-10.13-x86_64-cp-36-m"
+_OSX_PLATFORM = "macosx-10.13-x86_64"
_OSX_WHEEL_SUBSTRING = "macosx"
@@ -123,8 +123,8 @@ def platforms_test_impl(
want_present_platforms,
want_missing_platforms=(),
):
- def p537_deps(deps):
- return [d for d in deps if 'p537' in d]
+ def numpy_deps(deps):
+ return [d for d in deps if 'numpy' in d]
def assertInAny(substring, collection):
self.assertTrue(any(substring in d for d in collection),
f'Expected an entry matching "{substring}" in {collection}')
@@ -139,7 +139,7 @@ def assertNotInAny(substring, collection):
with self.caching_config() as config, self.mock_buildroot() as buildroot, buildroot.pushd():
config['python-setup'] = {
- 'platforms': []
+ 'platforms': None
}
buildroot.write_file(test_src, '')
@@ -154,7 +154,7 @@ def assertNotInAny(substring, collection):
python_requirement_library(
name='numpy',
requirements=[
- python_requirement('p537==1.0.4')
+ python_requirement('numpy==1.14.5')
]
)
@@ -176,7 +176,7 @@ def assertNotInAny(substring, collection):
self.assert_success(result)
with open_zip(test_pex) as z:
- deps = p537_deps(z.namelist())
+ deps = numpy_deps(z.namelist())
for platform in want_present_platforms:
assertInAny(platform, deps)
for platform in want_missing_platforms:
diff --git a/tests/python/pants_test/init/BUILD b/tests/python/pants_test/init/BUILD
index d27fa3befd4..4676505aa5d 100644
--- a/tests/python/pants_test/init/BUILD
+++ b/tests/python/pants_test/init/BUILD
@@ -18,7 +18,6 @@ python_tests(
'src/python/pants/init',
'src/python/pants/option',
'src/python/pants/pantsd:pants_daemon',
- 'src/python/pants/python',
'src/python/pants/subsystem',
'src/python/pants/util:contextutil',
'src/python/pants/util:dirutil',
diff --git a/tests/python/pants_test/init/test_plugin_resolver.py b/tests/python/pants_test/init/test_plugin_resolver.py
index cd1f3b12287..64bf394a253 100644
--- a/tests/python/pants_test/init/test_plugin_resolver.py
+++ b/tests/python/pants_test/init/test_plugin_resolver.py
@@ -2,82 +2,47 @@
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
-import shutil
+import time
import unittest
-from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
-from dataclasses import dataclass
-from pathlib import Path
from textwrap import dedent
from parameterized import parameterized
+from pex.crawler import Crawler
+from pex.installer import EggInstaller, Packager, WheelInstaller
from pex.interpreter import PythonInterpreter
from pex.resolver import Unsatisfiable
from pkg_resources import Requirement, WorkingSet
from pants.init.plugin_resolver import PluginResolver
from pants.option.options_bootstrapper import OptionsBootstrapper
-from pants.python.setup_py_runner import SetupPyRunner
from pants.testutil.interpreter_selection_utils import (
PY_36,
PY_37,
python_interpreter_path,
skip_unless_python36_and_python37_present,
)
-from pants.testutil.subsystem.util import init_subsystem
from pants.util.contextutil import temporary_dir
-from pants.util.dirutil import safe_rmtree, touch
+from pants.util.dirutil import safe_open, safe_rmtree, touch
req = Requirement.parse
-
-@dataclass(frozen=True)
-class Installer(metaclass=ABCMeta):
- source_dir: Path
- install_dir: Path
-
- def run(self) -> None:
- init_subsystem(SetupPyRunner.Factory)
- dist = self._create_dist(SetupPyRunner.Factory.create())
- shutil.copy(dist, self.install_dir)
-
- @abstractmethod
- def _create_dist(self, runner: SetupPyRunner) -> Path:
- ...
-
-
-class SdistInstaller(Installer):
- def _create_dist(self, runner: SetupPyRunner) -> Path:
- return runner.sdist(source_dir=self.source_dir)
-
-
-class WheelInstaller(Installer):
- def _create_dist(self, runner: SetupPyRunner):
- return runner.bdist(source_dir=self.source_dir)
-
-
-INSTALLERS = [('sdist', SdistInstaller), ('whl', WheelInstaller)]
+INSTALLERS = [('sdist', Packager), ('egg', EggInstaller), ('whl', WheelInstaller)]
class PluginResolverTest(unittest.TestCase):
-
- DEFAULT_VERSION = '0.0.0'
-
- @classmethod
- def create_plugin(cls, distribution_repo_dir, plugin, version=None, packager_cls=None):
- distribution_repo_dir = Path(distribution_repo_dir)
-
- source_dir = distribution_repo_dir.joinpath(plugin)
- source_dir.mkdir(parents=True)
- source_dir.joinpath('setup.py').write_text(dedent(f"""
+ @staticmethod
+ def create_plugin(distribution_repo_dir, plugin, version=None, packager_cls=None):
+ with safe_open(os.path.join(distribution_repo_dir, plugin, 'setup.py'), 'w') as fp:
+ fp.write(dedent(f"""
from setuptools import setup
- setup(name="{plugin}", version="{version or cls.DEFAULT_VERSION}")
+ setup(name="{plugin}", version="{version or '0.0.0'}")
"""))
- packager_cls = packager_cls or SdistInstaller
- packager = packager_cls(source_dir=source_dir,
+ packager_cls = packager_cls or Packager
+ packager = packager_cls(source_dir=os.path.join(distribution_repo_dir, plugin),
install_dir=distribution_repo_dir)
packager.run()
@@ -118,30 +83,27 @@ def provide_chroot(existing):
options_bootstrapper = OptionsBootstrapper.create(env=env, args=args)
plugin_resolver = PluginResolver(options_bootstrapper, interpreter=interpreter)
cache_dir = plugin_resolver.plugin_cache_dir
-
- working_set = plugin_resolver.resolve(WorkingSet(entries=[]))
- for dist in working_set:
- self.assertIn(Path(cache_dir), Path(dist.location).parents)
-
- yield working_set, root_dir, repo_dir, cache_dir
+ yield plugin_resolver.resolve(WorkingSet(entries=[])), root_dir, repo_dir, cache_dir
def test_no_plugins(self):
with self.plugin_resolution() as (working_set, _, _, _):
- self.assertEqual([], list(working_set))
+ self.assertEqual([], working_set.entries)
@parameterized.expand(INSTALLERS)
def test_plugins(self, unused_test_name, packager_cls):
with self.plugin_resolution(plugins=[('jake', '1.2.3'), 'jane'],
packager_cls=packager_cls) as (working_set, _, _, cache_dir):
-
- def assert_dist_version(name, expected_version):
- dist = working_set.find(req(name))
- self.assertEqual(expected_version, dist.version)
-
self.assertEqual(2, len(working_set.entries))
- assert_dist_version(name='jake', expected_version='1.2.3')
- assert_dist_version(name='jane', expected_version=self.DEFAULT_VERSION)
+ dist = working_set.find(req('jake'))
+ self.assertIsNotNone(dist)
+ self.assertEqual(os.path.realpath(cache_dir),
+ os.path.realpath(os.path.dirname(dist.location)))
+
+ dist = working_set.find(req('jane'))
+ self.assertIsNotNone(dist)
+ self.assertEqual(os.path.realpath(cache_dir),
+ os.path.realpath(os.path.dirname(dist.location)))
@parameterized.expand(INSTALLERS)
def test_exact_requirements(self, unused_test_name, packager_cls):
@@ -149,16 +111,17 @@ def test_exact_requirements(self, unused_test_name, packager_cls):
packager_cls=packager_cls) as results:
working_set, chroot, repo_dir, cache_dir = results
+ self.assertEqual(2, len(working_set.entries))
+
# Kill the repo source dir and re-resolve. If the PluginResolver truly detects exact
# requirements it should skip any resolves and load directly from the still in-tact cache.
safe_rmtree(repo_dir)
with self.plugin_resolution(chroot=chroot,
plugins=[('jake', '1.2.3'), ('jane', '3.4.5')]) as results2:
-
working_set2, _, _, _ = results2
- self.assertEqual(list(working_set), list(working_set2))
+ self.assertEqual(working_set.entries, working_set2.entries)
@parameterized.expand(INSTALLERS)
@skip_unless_python36_and_python37_present
@@ -169,11 +132,12 @@ def test_exact_requirements_interpreter_change(self, unused_test_name, packager_
with self.plugin_resolution(interpreter=python36,
plugins=[('jake', '1.2.3'), ('jane', '3.4.5')],
packager_cls=packager_cls) as results:
-
working_set, chroot, repo_dir, cache_dir = results
+ self.assertEqual(2, len(working_set.entries))
+
safe_rmtree(repo_dir)
- with self.assertRaises(Unsatisfiable):
+ with self.assertRaises(FileNotFoundError):
with self.plugin_resolution(interpreter=python37,
chroot=chroot,
plugins=[('jake', '1.2.3'), ('jane', '3.4.5')]):
@@ -185,22 +149,25 @@ def test_exact_requirements_interpreter_change(self, unused_test_name, packager_
with self.plugin_resolution(interpreter=python36,
chroot=chroot,
plugins=[('jake', '1.2.3'), ('jane', '3.4.5')]) as results2:
-
working_set2, _, _, _ = results2
- self.assertEqual(list(working_set), list(working_set2))
+
+ self.assertEqual(working_set.entries, working_set2.entries)
@parameterized.expand(INSTALLERS)
def test_inexact_requirements(self, unused_test_name, packager_cls):
with self.plugin_resolution(plugins=[('jake', '1.2.3'), 'jane'],
packager_cls=packager_cls) as results:
-
working_set, chroot, repo_dir, cache_dir = results
+ self.assertEqual(2, len(working_set.entries))
+
# Kill the cache and the repo source dir and wait past our 1s test TTL, if the PluginResolver
# truly detects inexact plugin requirements it should skip perma-caching and fall through to
- # a pex resolve and then fail.
+      # pex for a TTL expiry resolve and then fail.
safe_rmtree(repo_dir)
safe_rmtree(cache_dir)
+ Crawler.reset_cache()
+ time.sleep(1.5)
with self.assertRaises(Unsatisfiable):
with self.plugin_resolution(chroot=chroot, plugins=[('jake', '1.2.3'), 'jane']):
diff --git a/tests/python/pants_test/pantsd/test_pantsd_integration.py b/tests/python/pants_test/pantsd/test_pantsd_integration.py
index 448a38f9c61..461f75699f0 100644
--- a/tests/python/pants_test/pantsd/test_pantsd_integration.py
+++ b/tests/python/pants_test/pantsd/test_pantsd_integration.py
@@ -760,7 +760,10 @@ def test_unhandled_exceptions_only_log_exceptions_once(self):
checker.assert_running()
self.assert_failure(result)
# Assert that the desired exception has been triggered once.
- self.assertRegex(result.stderr_data, r'Exception message:.*badreq==99.99.99')
+ self.assertIn(
+ """Exception message: Could not satisfy all requirements for badreq==99.99.99:\n badreq==99.99.99""",
+ result.stderr_data,
+ )
# Assert that it has only been triggered once.
self.assertNotIn(
'During handling of the above exception, another exception occurred:',