Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[internal] Expect lockfile metadata to be defined #12616

Merged
merged 2 commits into from
Aug 23, 2021
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
98 changes: 46 additions & 52 deletions src/python/pants/backend/experimental/python/lockfile_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,18 +6,22 @@
import hashlib
import json
from dataclasses import dataclass
from typing import Any, Callable, Iterable, TypeVar
from typing import Any, Iterable

from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints

# Sentinel comment lines that delimit the JSON metadata block embedded at the
# top of a generated lockfile. `from_lockfile` scans for these exact bytes, so
# they must never change without a migration for existing lockfiles.
BEGIN_LOCKFILE_HEADER = b"# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---"
END_LOCKFILE_HEADER = b"# --- END PANTS LOCKFILE METADATA ---"


class InvalidLockfileError(Exception):
    """Raised when a lockfile's embedded metadata block is missing or malformed."""

    pass


@dataclass
class LockfileMetadata:
    """Metadata embedded as a commented JSON header in a generated lockfile.

    Both fields are required: a lockfile without them is treated as invalid
    (see `from_lockfile`, which raises `InvalidLockfileError`), so the
    annotations are deliberately non-optional.
    """

    # Digest of the requirements used to generate the lockfile; compared against
    # the expected digest to detect staleness.
    requirements_invalidation_digest: str
    # The interpreter constraints the lockfile was generated for.
    valid_for_interpreter_constraints: InterpreterConstraints

@classmethod
def from_lockfile(cls, lockfile: bytes) -> LockfileMetadata:
Expand All @@ -32,34 +36,47 @@ def from_lockfile(cls, lockfile: bytes) -> LockfileMetadata:
elif in_metadata_block:
metadata_lines.append(line[2:])

if not metadata_lines:
# TODO(#12314): Add a good error.
raise InvalidLockfileError("")

try:
metadata = json.loads(b"\n".join(metadata_lines))
except json.decoder.JSONDecodeError:
# TODO(#12314): Add a good error.
raise

T = TypeVar("T")
raise InvalidLockfileError("")

def coerce(t: Callable[[Any], T], key: str) -> T | None:
"""Gets a value from `metadata`, coercing it to type `t` if not `None`."""
v = metadata.get(key, None)
def get_or_raise(key: str) -> Any:
try:
return t(v) if v is not None else None
except Exception:
# TODO: this should trigger error/warning behavior
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it should always be an error if the metadata is malformed, rather than a warning. That [python-setup] option imo is only whether to warn vs error if the metadata is stale, but not if the metadata is missing or malformed.

return None

return LockfileMetadata(
requirements_invalidation_digest=coerce(str, "requirements_invalidation_digest"),
valid_for_interpreter_constraints=coerce(
InterpreterConstraints, "valid_for_interpreter_constraints"
),
)
return metadata[key]
except KeyError:
# TODO(#12314): Add a good error about the key not being defined.
raise InvalidLockfileError("")

requirements_digest = get_or_raise("requirements_invalidation_digest")
if not isinstance(requirements_digest, str):
# TODO(#12314): Add a good error about invalid data type.
raise InvalidLockfileError("")

try:
interpreter_constraints = InterpreterConstraints(
get_or_raise("valid_for_interpreter_constraints")
)
except TypeError:
# TODO(#12314): Add a good error about invalid data type.
raise InvalidLockfileError("")

return LockfileMetadata(requirements_digest, interpreter_constraints)

def add_header_to_lockfile(self, lockfile: bytes, *, regenerate_command: str) -> bytes:
metadata_as_a_comment = "\n".join(f"# {i}" for i in self._to_json().splitlines()).encode(
"ascii"
)
metadata_dict = {
"requirements_invalidation_digest": self.requirements_invalidation_digest,
"valid_for_interpreter_constraints": [
str(ic) for ic in self.valid_for_interpreter_constraints
],
}
metadata_json = json.dumps(metadata_dict, ensure_ascii=True, indent=2).splitlines()
metadata_as_a_comment = "\n".join(f"# {l}" for l in metadata_json).encode("ascii")
header = b"%b\n%b\n%b" % (BEGIN_LOCKFILE_HEADER, metadata_as_a_comment, END_LOCKFILE_HEADER)

regenerate_command_bytes = (
Expand All @@ -69,43 +86,20 @@ def add_header_to_lockfile(self, lockfile: bytes, *, regenerate_command: str) ->

return b"%b\n#\n%b\n\n%b" % (regenerate_command_bytes, header, lockfile)

def _to_json(self) -> str:
"""Produces a JSON-encoded dictionary that represents the contents of this metadata."""
constraints = self.valid_for_interpreter_constraints
metadata = {
"requirements_invalidation_digest": self.requirements_invalidation_digest,
"valid_for_interpreter_constraints": [str(i) for i in constraints]
if constraints
else None,
}
return json.dumps(metadata, ensure_ascii=True, indent=2)

def is_valid_for(
self,
expected_invalidation_digest: str | None,
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
) -> bool:
"""Returns True if this `LockfileMetadata` represents a lockfile that can be used in the
current execution context.

A lockfile can be used in the current execution context if `expected_invalidation_digest ==
requirements_invalidation_digest`, and if `user_interpreter_constraints` matches only
interpreters specified by `valid_for_interpreter_constraints`.
"""

"""Returns True if the lockfile can be used in the current execution context."""
if expected_invalidation_digest is None:
return True

if self.requirements_invalidation_digest != expected_invalidation_digest:
return False

if self.valid_for_interpreter_constraints is None:
# This lockfile matches all interpreter constraints (TODO: check this)
return True

return self.valid_for_interpreter_constraints.contains(
user_interpreter_constraints, interpreter_universe
return (
self.requirements_invalidation_digest == expected_invalidation_digest
and self.valid_for_interpreter_constraints.contains(
user_interpreter_constraints, interpreter_universe
)
)


Expand Down