Support multiple disjoint Python resolves via `[python].experimental_resolves` (#14299)

Read the `help` message in `src/python/pants/backend/python/subsystems/setup.py` for an explanation of how this works and what we mean by "disjoint".

This wires up `experimental_resolve` to `python_source`, `python_awslambda`, and `python_google_cloud_function`. Thanks to the implementation in `pex_from_targets.py`, they automatically support this new feature. The only target not yet supported is `python_distribution`.

Key note: `python_source` has the field `resolve`, rather than the `compatible_resolves` field we originally envisioned. This is because we realized in #13882 and its design doc at https://docs.google.com/document/d/1mzZWnXiE6OkgMH_Dm7WeA3ck9veusQmr_c6GWPb_tsQ/edit# that the way for a `python_source` target to work with multiple resolves is via parametrization, which will create a distinct target for each resolve.

However, `python_requirement` still uses `compatible_resolves` (for now, at least) because we don't directly run tools like MyPy and Pytest on those targets. More concretely, they are never the "root" used to determine which resolve to use for a build.
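To make that split concrete, here is a minimal BUILD-file sketch of how the two fields fit together. The resolve names `data-science` and `pants-plugins` are borrowed from the new `help` text; the paths, target names, and version pin are placeholders, not part of this change:

```python
# 3rdparty/python/BUILD (hypothetical path and pin, for illustration only)
# A requirement declares every resolve it may appear in; `generate-lockfiles`
# will then pin it in each of those lockfiles.
python_requirement(
    name="requests",
    requirements=["requests==2.27.1"],
    experimental_compatible_resolves=["data-science", "pants-plugins"],
)

# src/py/mylib/BUILD (hypothetical path)
# First-party code picks exactly one resolve, and all of its dependencies must
# be compatible with that resolve.
python_sources(
    name="mylib",
    experimental_resolve="data-science",
)
```

Letting the same first-party target participate in several resolves is what the parametrization approach mentioned above is intended to cover.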

[ci skip-rust]
Eric-Arellano authored Jan 29, 2022
1 parent c09289d commit 0488605
Showing 9 changed files with 325 additions and 61 deletions.
14 changes: 12 additions & 2 deletions src/python/pants/backend/awslambda/python/target_types.py
@@ -11,6 +11,8 @@
PythonModuleOwnersRequest,
)
from pants.backend.python.dependency_inference.rules import PythonInferSubsystem, import_rules
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import PythonResolveField
from pants.core.goals.package import OutputPathField
from pants.engine.addresses import Address
from pants.engine.fs import GlobMatchErrorBehavior, PathGlobs, Paths
@@ -130,7 +132,9 @@ class InjectPythonLambdaHandlerDependency(InjectDependenciesRequest):

@rule(desc="Inferring dependency from the python_awslambda `handler` field")
async def inject_lambda_handler_dependency(
request: InjectPythonLambdaHandlerDependency, python_infer_subsystem: PythonInferSubsystem
request: InjectPythonLambdaHandlerDependency,
python_infer_subsystem: PythonInferSubsystem,
python_setup: PythonSetup,
) -> InjectedDependencies:
if not python_infer_subsystem.entry_points:
return InjectedDependencies()
@@ -143,7 +147,12 @@ async def inject_lambda_handler_dependency(
),
)
module, _, _func = handler.val.partition(":")
owners = await Get(PythonModuleOwners, PythonModuleOwnersRequest(module, resolve=None))
owners = await Get(
PythonModuleOwners,
PythonModuleOwnersRequest(
module, resolve=original_tgt.target[PythonResolveField].normalized_value(python_setup)
),
)
address = original_tgt.target.address
explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
owners.ambiguous,
@@ -202,6 +211,7 @@ class PythonAWSLambda(Target):
PythonAwsLambdaDependencies,
PythonAwsLambdaHandlerField,
PythonAwsLambdaRuntime,
PythonResolveField,
)
help = (
"A self-contained Python function suitable for uploading to AWS Lambda.\n\n"
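With `PythonResolveField` now registered on `PythonAWSLambda` above (and on `PythonGoogleCloudFunction` below), a packaged target can also opt into a non-default resolve. A hypothetical sketch; the handler, runtime, and resolve values are placeholders:

```python
# src/py/lambdas/BUILD (hypothetical)
# The lambda's handler dependency is now inferred within the target's resolve,
# per the updated `inject_lambda_handler_dependency` rule.
python_awslambda(
    name="process_events",
    handler="process_events.py:handler",
    runtime="python3.8",
    experimental_resolve="data-science",
)
```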
@@ -12,6 +12,8 @@
PythonModuleOwnersRequest,
)
from pants.backend.python.dependency_inference.rules import PythonInferSubsystem, import_rules
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import PythonResolveField
from pants.core.goals.package import OutputPathField
from pants.engine.addresses import Address
from pants.engine.fs import GlobMatchErrorBehavior, PathGlobs, Paths
@@ -133,6 +135,7 @@ class InjectPythonCloudFunctionHandlerDependency(InjectDependenciesRequest):
async def inject_cloud_function_handler_dependency(
request: InjectPythonCloudFunctionHandlerDependency,
python_infer_subsystem: PythonInferSubsystem,
python_setup: PythonSetup,
) -> InjectedDependencies:
if not python_infer_subsystem.entry_points:
return InjectedDependencies()
@@ -147,7 +150,12 @@ async def inject_cloud_function_handler_dependency(
),
)
module, _, _func = handler.val.partition(":")
owners = await Get(PythonModuleOwners, PythonModuleOwnersRequest(module, resolve=None))
owners = await Get(
PythonModuleOwners,
PythonModuleOwnersRequest(
module, resolve=original_tgt.target[PythonResolveField].normalized_value(python_setup)
),
)
address = original_tgt.target.address
explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
owners.ambiguous,
@@ -229,6 +237,7 @@ class PythonGoogleCloudFunction(Target):
PythonGoogleCloudFunctionHandlerField,
PythonGoogleCloudFunctionRuntime,
PythonGoogleCloudFunctionType,
PythonResolveField,
)
help = (
"A self-contained Python function suitable for uploading to Google Cloud Function.\n\n"
@@ -193,12 +193,7 @@ async def infer_python_dependencies_via_imports(
),
)

resolve = (
tgt[PythonResolveField].normalized_value(python_setup)
if tgt.has_field(PythonResolveField)
else None
)

resolve = tgt[PythonResolveField].normalized_value(python_setup)
owners_per_import = await MultiGet(
Get(PythonModuleOwners, PythonModuleOwnersRequest(imported_module, resolve=resolve))
for imported_module in parsed_imports
4 changes: 2 additions & 2 deletions src/python/pants/backend/python/lint/flake8/rules.py
@@ -11,7 +11,7 @@
Flake8FirstPartyPlugins,
)
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.util_rules import pex_from_targets
from pants.backend.python.util_rules import pex
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import PexRequest, VenvPex, VenvPexProcess
from pants.core.goals.lint import REPORT_DIR, LintRequest, LintResult, LintResults
@@ -137,4 +137,4 @@ async def flake8_lint(


def rules():
return [*collect_rules(), UnionRule(LintRequest, Flake8Request), *pex_from_targets.rules()]
return [*collect_rules(), UnionRule(LintRequest, Flake8Request), *pex.rules()]
2 changes: 2 additions & 0 deletions src/python/pants/backend/python/lint/flake8/subsystem.py
@@ -18,6 +18,7 @@
PythonRequirementsField,
PythonSourceField,
)
from pants.backend.python.util_rules import python_sources
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import PexRequirements
from pants.backend.python.util_rules.python_sources import (
@@ -287,5 +288,6 @@ def rules():
return (
*collect_rules(),
*lockfile.rules(),
*python_sources.rules(),
UnionRule(GenerateToolLockfileSentinel, Flake8LockfileSentinel),
)
34 changes: 28 additions & 6 deletions src/python/pants/backend/python/subsystems/setup.py
@@ -138,12 +138,34 @@ def register_options(cls, register):
default={"python-default": "3rdparty/python/default_lock.txt"},
help=(
"A mapping of logical names to lockfile paths used in your project.\n\n"
"For now, things only work properly if you define a single resolve and set "
"`[python].experimental_default_resolve` to that value. We are close to "
"properly supporting multiple (disjoint) resolves.\n\n"
"To generate a lockfile, run `./pants generate-lockfiles --resolve=<name>` or "
"`./pants generate-lockfiles` to generate for all resolves (including tool "
"lockfiles).\n\n"
"Many organizations only need a single resolve for their whole project, which is "
"a good default and the simplest thing to do. However, you may need multiple "
"resolves, such as if you use two conflicting versions of a requirement in "
"your repository.\n\n"
"For now, Pants only has first-class support for disjoint resolves, meaning that "
"you cannot ergonomically set a `python_source` target, for example, to work "
"with multiple resolves. Practically, this means that you cannot yet reuse common "
"code, such as util files, across projects using different resolves. Support for "
"overlapping resolves is coming soon.\n\n"
"If you only need a single resolve, run `./pants generate-lockfiles` to generate "
"the lockfile.\n\n"
"If you need multiple resolves:\n\n"
" 1. Via this option, define multiple resolve "
"names and their lockfile paths. The names should be meaningful to your "
"repository, such as `data-science` or `pants-plugins`.\n"
" 2. Set the default with "
"`[python].experimental_default_resolve`.\n"
" 3. Update your `python_requirement` targets with the "
"`experimental_compatible_resolves` field to declare which resolve(s) they should "
"be available in. They default to `[python].experimental_default_resolve`, so you "
"only need to update targets that you want in non-default resolves. "
"(Often you'll set this via the `python_requirements` or `poetry_requirements` "
"target generators)\n"
" 4. Run `./pants generate-lockfiles` to generate the lockfiles. If the results "
"aren't what you'd expect, adjust the prior step.\n"
" 5. Update any targets like `python_source` / `python_sources`, "
"`python_test` / `python_tests`, and `pex_binary` which need to set a non-default "
"resolve with the `experimental_resolve` field.\n\n"
"Only applies if `[python].enable_resolves` is true.\n\n"
"This option is experimental and may change without the normal deprecation policy."
),
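As a rough sketch of the workflow described in that `help` text (the conflicting-versions case it mentions), the configuration might look like the following. The resolve names come from the `help` text itself; the lockfile paths, requirement pins, and target names are assumptions made up for illustration:

```python
# pants.toml (assumed settings, shown as comments so this sketch stays in one file):
#   [python]
#   enable_resolves = true
#   experimental_default_resolve = "pants-plugins"
#   experimental_resolves = { "pants-plugins" = "3rdparty/python/plugins_lock.txt", "data-science" = "3rdparty/python/ds_lock.txt" }

# 3rdparty/python/BUILD
# Step 3: each version of the conflicting requirement is declared compatible
# with a different resolve. Step 4 is then `./pants generate-lockfiles`, which
# pins both lockfiles.
python_requirement(
    name="numpy-new",
    requirements=["numpy==1.22.1"],
    experimental_compatible_resolves=["data-science"],
)

python_requirement(
    name="numpy-old",
    requirements=["numpy==1.18.5"],
    experimental_compatible_resolves=["pants-plugins"],
)

# src/py/model/BUILD
# Step 5: first-party targets that need the non-default resolve say so explicitly.
python_sources(
    name="model",
    experimental_resolve="data-science",
)
```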
57 changes: 25 additions & 32 deletions src/python/pants/backend/python/target_types.py
@@ -109,9 +109,11 @@ class PythonResolveField(StringField, AsyncFieldMixin):
help = (
"The resolve from `[python].experimental_resolves` to use.\n\n"
"If not defined, will default to `[python].default_resolve`.\n\n"
"Only applies if `[python].enable_resolves` is true.\n\n"
"All dependencies must share the same resolve. This means that you can only depend on "
"first-party targets like `python_source` that set their `experimental_resolve` field "
"to the same value, and on `python_requirement` targets that include the resolve in "
"their `experimental_compatible_resolves` field.\n\n"
"This field is experimental and may change without the normal deprecation policy."
# TODO: Document expectations for dependencies once we validate that.
)

def normalized_value(self, python_setup: PythonSetup) -> str:
@@ -136,31 +138,6 @@ def resolve_and_lockfile(self, python_setup: PythonSetup) -> tuple[str, str] | N
return (resolve, python_setup.resolves[resolve])


class PythonCompatibleResolvesField(StringSequenceField, AsyncFieldMixin):
alias = "experimental_compatible_resolves"
required = False
help = (
"The set of resolves from `[python].experimental_resolves` that this target is "
"compatible with.\n\n"
"If not defined, will default to `[python].default_resolve`.\n\n"
"Only applies if `[python].enable_resolves` is true.\n\n"
"This field is experimental and may change without the normal deprecation policy."
# TODO: Document expectations for dependencies once we validate that.
)

def normalized_value(self, python_setup: PythonSetup) -> tuple[str, ...]:
"""Get the value after applying the default and validating every key is recognized."""
value_or_default = self.value or (python_setup.default_resolve,)
invalid_resolves = set(value_or_default) - set(python_setup.resolves)
if invalid_resolves:
raise UnrecognizedResolveNamesError(
sorted(invalid_resolves),
python_setup.resolves.keys(),
description_of_origin=f"the field `{self.alias}` in the target {self.address}",
)
return value_or_default


# -----------------------------------------------------------------------------------------------
# `pex_binary` and `pex_binaries` target
# -----------------------------------------------------------------------------------------------
@@ -775,6 +752,7 @@ class PythonSourceTarget(Target):
*COMMON_TARGET_FIELDS,
InterpreterConstraintsField,
Dependencies,
PythonResolveField,
PythonSourceField,
)
help = "A single Python source file."
@@ -812,6 +790,7 @@ class PythonTestUtilsGeneratorTarget(Target):
*COMMON_TARGET_FIELDS,
InterpreterConstraintsField,
Dependencies,
PythonResolveField,
PythonTestUtilsGeneratingSourcesField,
PythonSourcesOverridesField,
)
@@ -831,6 +810,7 @@ class PythonSourcesGeneratorTarget(Target):
*COMMON_TARGET_FIELDS,
InterpreterConstraintsField,
Dependencies,
PythonResolveField,
PythonSourcesGeneratingSourcesField,
PythonSourcesOverridesField,
)
@@ -975,20 +955,33 @@ def normalize_module_mapping(
return FrozenDict({canonicalize_project_name(k): tuple(v) for k, v in (mapping or {}).items()})


class PythonRequirementCompatibleResolvesField(PythonCompatibleResolvesField):
class PythonRequirementCompatibleResolvesField(StringSequenceField, AsyncFieldMixin):
alias = "experimental_compatible_resolves"
required = False
help = (
"The resolves from `[python].experimental_resolves` that this requirement should be "
"included in.\n\n"
"If not defined, will default to `[python].default_resolve`.\n\n"
"When generating a lockfile for a particular resolve via the `generate-lockfiles` goal, "
"it will include all requirements that are declared compatible with that resolve. "
"First-party targets like `python_source` and `pex_binary` then declare which resolve(s) "
"they use via the `experimental_resolve` and `experimental_compatible_resolves` field; so, "
"for your first-party code to use a particular `python_requirement` target, that "
"requirement must be included in the resolve(s) "
"First-party targets like `python_source` and `pex_binary` then declare which resolve "
"they use via the `experimental_resolve` field; so, for your first-party code to use a "
"particular `python_requirement` target, that requirement must be included in the resolve "
"used by that code."
)

def normalized_value(self, python_setup: PythonSetup) -> tuple[str, ...]:
"""Get the value after applying the default and validating every key is recognized."""
value_or_default = self.value or (python_setup.default_resolve,)
invalid_resolves = set(value_or_default) - set(python_setup.resolves)
if invalid_resolves:
raise UnrecognizedResolveNamesError(
sorted(invalid_resolves),
python_setup.resolves.keys(),
description_of_origin=f"the field `{self.alias}` in the target {self.address}",
)
return value_or_default


class PythonRequirementTarget(Target):
alias = "python_requirement"