
Commit

Use a list of requirement constraints for lockfile invalidation (pantsbuild#16469)

Improves upon pantsbuild#16420. @huonw wisely suggested we avoid using a hash of the constraints file because it makes Git diffs and merge conflicts much worse. See python-poetry/poetry#496 and rust-lang/cargo#7070.

[ci skip-rust]
[ci skip-build-wheels]
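The core idea, as a minimal standalone sketch (illustrative only, not the Pants implementation): recording the constraints themselves in the lockfile header gives the same staleness check as hashing the constraints file, but the header stays diffable.

import hashlib

constraints = ["ansicolors==1.1.7", "setuptools<58"]

# Hash-based header value: one opaque hex string. Any edit to the
# constraints file rewrites the entire value, so a Git diff or merge
# conflict says nothing about which constraint changed.
hash_value = hashlib.sha256("\n".join(sorted(constraints)).encode()).hexdigest()

# List-based header value: sorted for determinism. An edit shows up in
# a diff as one added or removed line per constraint.
list_value = sorted(constraints)

# Either way, invalidation is an equality check between the stored value
# and the value recomputed from the user's current configuration.
def is_stale(stored: object, current: object) -> bool:
    return stored != current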
Eric-Arellano authored and cczona committed Sep 1, 2022
1 parent 85d7c67 commit 340a768
Showing 10 changed files with 125 additions and 108 deletions.
50 changes: 26 additions & 24 deletions src/python/pants/backend/python/goals/lockfile.py
@@ -48,7 +48,6 @@
)
from pants.core.util_rules.lockfile_metadata import calculate_invalidation_digest
from pants.engine.fs import CreateDigest, Digest, DigestContents, FileContent, MergeDigests
-from pants.engine.internals.native_engine import FileDigest
from pants.engine.process import ProcessCacheScope, ProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule, rule_helper
from pants.engine.target import AllTargets
@@ -155,35 +154,41 @@ def warn_python_repos(option: str) -> None:
return MaybeWarnPythonRepos()


+@dataclass(frozen=True)
+class _PipArgsAndConstraintsSetup:
+    args: tuple[str, ...]
+    digest: Digest
+    constraints: FrozenOrderedSet[PipRequirement]


@rule_helper
async def _setup_pip_args_and_constraints_file(
python_setup: PythonSetup, *, resolve_name: str
-) -> tuple[list[str], Digest, FileDigest | None]:
-    extra_args = []
-    extra_digests = []
-    constraints_file_digest: None | FileDigest = None
+) -> _PipArgsAndConstraintsSetup:
+    args = []
+    digests = []

if python_setup.no_binary or python_setup.only_binary:
pip_args_file = "__pip_args.txt"
-        extra_args.extend(["-r", pip_args_file])
+        args.extend(["-r", pip_args_file])
pip_args_file_content = "\n".join(
[f"--no-binary {pkg}" for pkg in python_setup.no_binary]
+ [f"--only-binary {pkg}" for pkg in python_setup.only_binary]
)
pip_args_digest = await Get(
Digest, CreateDigest([FileContent(pip_args_file, pip_args_file_content.encode())])
)
-        extra_digests.append(pip_args_digest)
+        digests.append(pip_args_digest)

+    constraints: FrozenOrderedSet[PipRequirement] = FrozenOrderedSet()
resolve_config = await Get(ResolvePexConfig, ResolvePexConfigRequest(resolve_name))
if resolve_config.constraints_file:
-        _constraints_file_entry = resolve_config.constraints_file[1]
-        extra_args.append(f"--constraints={_constraints_file_entry.path}")
-        constraints_file_digest = _constraints_file_entry.file_digest
-        extra_digests.append(resolve_config.constraints_file[0])
+        args.append(f"--constraints={resolve_config.constraints_file.path}")
+        digests.append(resolve_config.constraints_file.digest)
+        constraints = resolve_config.constraints_file.constraints

-    input_digest = await Get(Digest, MergeDigests(extra_digests))
-    return extra_args, input_digest, constraints_file_digest
+    input_digest = await Get(Digest, MergeDigests(digests))
+    return _PipArgsAndConstraintsSetup(tuple(args), input_digest, constraints)


@rule(desc="Generate Python lockfile", level=LogLevel.DEBUG)
@@ -194,16 +199,13 @@ async def generate_lockfile(
python_repos: PythonRepos,
python_setup: PythonSetup,
) -> GenerateLockfileResult:
-    constraints_file_hash: str | None = None
+    requirement_constraints: FrozenOrderedSet[PipRequirement] = FrozenOrderedSet()

if req.use_pex:
-        (
-            extra_args,
-            input_digest,
-            constraints_file_digest,
-        ) = await _setup_pip_args_and_constraints_file(python_setup, resolve_name=req.resolve_name)
-        if constraints_file_digest:
-            constraints_file_hash = constraints_file_digest.fingerprint
+        pip_args_setup = await _setup_pip_args_and_constraints_file(
+            python_setup, resolve_name=req.resolve_name
+        )
+        requirement_constraints = pip_args_setup.constraints

header_delimiter = "//"
result = await Get(
Expand Down Expand Up @@ -233,13 +235,13 @@ async def generate_lockfile(
"mac",
# This makes diffs more readable when lockfiles change.
"--indent=2",
-                *extra_args,
+                *pip_args_setup.args,
*python_repos.pex_args,
*python_setup.manylinux_pex_args,
*req.interpreter_constraints.generate_pex_arg_list(),
*req.requirements,
),
-            additional_input_digest=input_digest,
+            additional_input_digest=pip_args_setup.digest,
output_files=("lock.json",),
description=f"Generate lockfile for {req.resolve_name}",
# Instead of caching lockfile generation with LMDB, we instead use the invalidation
@@ -306,7 +308,7 @@ async def generate_lockfile(
metadata = PythonLockfileMetadata.new(
valid_for_interpreter_constraints=req.interpreter_constraints,
requirements={PipRequirement.parse(i) for i in req.requirements},
-        constraints_file_hash=constraints_file_hash,
+        requirement_constraints=set(requirement_constraints),
)
lockfile_with_header = metadata.add_header_to_lockfile(
initial_lockfile_digest_contents[0].content,
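Aside: the hunk above also swaps a three-element tuple return for a frozen dataclass. A minimal sketch of that pattern (names hypothetical) — call sites get named, immutable fields instead of positional unpacking:

from dataclasses import dataclass

@dataclass(frozen=True)
class SetupResult:
    args: tuple[str, ...]
    constraints: frozenset[str]

def setup() -> SetupResult:
    # frozen=True makes instances immutable and hashable, which suits
    # values passed between cacheable rules.
    return SetupResult(
        args=("--constraints=constraints.txt",),
        constraints=frozenset({"ansicolors==1.1.7"}),
    )

result = setup()
print(result.args, sorted(result.constraints))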
50 changes: 29 additions & 21 deletions src/python/pants/backend/python/goals/lockfile_test.py
@@ -44,7 +44,7 @@ def _generate(
rule_runner: RuleRunner,
use_pex: bool,
ansicolors_version: str = "==1.1.8",
-    constraints_file_hash: str | None = None,
+    requirement_constraints_str: str = '// "requirement_constraints": []',
) -> str:
result = rule_runner.request(
GenerateLockfileResult,
@@ -64,24 +64,29 @@
if not use_pex:
return content

-    constraints_file_hash_str = f'"{constraints_file_hash}"' if constraints_file_hash else "null"
-    pex_header = dedent(
-        f"""\
-        // This lockfile was autogenerated by Pants. To regenerate, run:
-        //
-        //    ./pants generate-lockfiles --resolve=test
-        //
-        // --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
-        // {{
-        //   "version": 3,
-        //   "valid_for_interpreter_constraints": [],
-        //   "generated_with_requirements": [
-        //     "ansicolors{ansicolors_version}"
-        //   ],
-        //   "constraints_file_hash": {constraints_file_hash_str}
-        // }}
-        // --- END PANTS LOCKFILE METADATA ---
-        """
+    pex_header = (
+        dedent(
+            f"""\
+            // This lockfile was autogenerated by Pants. To regenerate, run:
+            //
+            //    ./pants generate-lockfiles --resolve=test
+            //
+            // --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
+            // {{
+            //   "version": 3,
+            //   "valid_for_interpreter_constraints": [],
+            //   "generated_with_requirements": [
+            //     "ansicolors{ansicolors_version}"
+            //   ],
+            """
+        )
+        + requirement_constraints_str
+        + dedent(
+            """
+            // }
+            // --- END PANTS LOCKFILE METADATA ---
+            """
+        )
    )
assert content.startswith(pex_header)
return strip_prefix(content, pex_header)
@@ -167,8 +172,11 @@ def test_constraints_file(rule_runner: RuleRunner) -> None:
rule_runner=rule_runner,
use_pex=True,
ansicolors_version=">=1.0",
-        constraints_file_hash=(
-            "1999760ce9dd0f82847def308992e3345592fc9e77a937c1e9bbb78a42ae3943"
+        requirement_constraints_str=dedent(
+            """\
+            // "requirement_constraints": [
+            //   "ansicolors==1.1.7"
+            // ]"""
),
)
)
3 changes: 3 additions & 0 deletions src/python/pants/backend/python/pip_requirement.py
@@ -82,5 +82,8 @@ def __eq__(self, other):
return False
return self._req == other._req

+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self._req})"

def __str__(self):
return str(self._req)
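The `__repr__` added above pays off in the new set-based comparisons: pytest renders set members with `repr`, so a mismatch names the class and the requirement instead of a bare memory address. A standalone sketch of the effect (the `Req` class is illustrative):

class Req:
    def __init__(self, req: str) -> None:
        self._req = req

    def __repr__(self) -> str:
        # Same pattern as the diff above: class name plus the wrapped
        # requirement, rather than the default "<Req object at 0x...>".
        return f"{self.__class__.__name__}({self._req})"

    def __str__(self) -> str:
        return str(self._req)

print({Req("ansicolors==1.1.7")})  # {Req(ansicolors==1.1.7)}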
41 changes: 18 additions & 23 deletions src/python/pants/backend/python/util_rules/lockfile_metadata.py
@@ -39,7 +39,7 @@ def new(
*,
valid_for_interpreter_constraints: InterpreterConstraints,
requirements: set[PipRequirement],
-        constraints_file_hash: str | None,
+        requirement_constraints: set[PipRequirement],
) -> PythonLockfileMetadata:
"""Call the most recent version of the `LockfileMetadata` class to construct a concrete
instance.
@@ -50,7 +50,7 @@
"""

return PythonLockfileMetadataV3(
-            valid_for_interpreter_constraints, requirements, constraints_file_hash
+            valid_for_interpreter_constraints, requirements, requirement_constraints
)

@classmethod
@@ -70,7 +70,7 @@ def is_valid_for(
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
user_requirements: Iterable[PipRequirement],
-        constraints_file_path_and_hash: tuple[str, str] | None,
+        requirement_constraints: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
"""Returns Truthy if this `PythonLockfileMetadata` can be used in the current execution
context."""
@@ -114,7 +114,7 @@
interpreter_universe: Iterable[str],
# Everything below is not used by v1.
user_requirements: Iterable[PipRequirement],
-        constraints_file_path_and_hash: tuple[str, str] | None,
+        requirement_constraints: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
failure_reasons: set[InvalidPythonLockfileReason] = set()

@@ -168,13 +168,7 @@ def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
instance = cast(PythonLockfileMetadataV2, instance)
# Requirements need to be stringified then sorted so that tests are deterministic. Sorting
# followed by stringifying does not produce a meaningful result.
-        return {
-            "generated_with_requirements": (
-                sorted(str(i) for i in instance.requirements)
-                if instance.requirements is not None
-                else None
-            )
-        }
+        return {"generated_with_requirements": (sorted(str(i) for i in instance.requirements))}

def is_valid_for(
self,
@@ -185,7 +179,7 @@ def is_valid_for(
interpreter_universe: Iterable[str],
user_requirements: Iterable[PipRequirement],
# Everything below is not used by V2.
-        constraints_file_path_and_hash: tuple[str, str] | None,
+        requirement_constraints: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
failure_reasons = set()

@@ -210,7 +204,7 @@
class PythonLockfileMetadataV3(PythonLockfileMetadataV2):
"""Lockfile version that considers constraints files."""

-    constraints_file_hash: str | None
+    requirement_constraints: set[PipRequirement]

@classmethod
def _from_json_dict(
@@ -221,19 +215,23 @@ def _from_json_dict(
) -> PythonLockfileMetadataV3:
v2_metadata = super()._from_json_dict(json_dict, lockfile_description, error_suffix)
metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
-        constraints_file_hash = metadata(
-            "constraints_file_hash", str, lambda x: x  # type: ignore[no-any-return]
+        requirement_constraints = metadata(
+            "requirement_constraints",
+            Set[PipRequirement],
+            lambda l: {PipRequirement.parse(i) for i in l},
)
return PythonLockfileMetadataV3(
valid_for_interpreter_constraints=v2_metadata.valid_for_interpreter_constraints,
requirements=v2_metadata.requirements,
-            constraints_file_hash=constraints_file_hash,
+            requirement_constraints=requirement_constraints,
)

@classmethod
def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
instance = cast(PythonLockfileMetadataV3, instance)
return {"constraints_file_hash": instance.constraints_file_hash}
return {
"requirement_constraints": (sorted(str(i) for i in instance.requirement_constraints))
}

def is_valid_for(
self,
@@ -243,7 +241,7 @@ def is_valid_for(
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
user_requirements: Iterable[PipRequirement],
-        constraints_file_path_and_hash: tuple[str, str] | None,
+        requirement_constraints: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
failure_reasons = (
super()
@@ -253,14 +251,11 @@
user_interpreter_constraints=user_interpreter_constraints,
interpreter_universe=interpreter_universe,
user_requirements=user_requirements,
-                constraints_file_path_and_hash=constraints_file_path_and_hash,
+                requirement_constraints=requirement_constraints,
)
.failure_reasons
)

-        provided_constraints_file_hash = (
-            constraints_file_path_and_hash[1] if constraints_file_path_and_hash else None
-        )
-        if provided_constraints_file_hash != self.constraints_file_hash:
+        if self.requirement_constraints != set(requirement_constraints):
failure_reasons.add(InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH)
return LockfileMetadataValidation(failure_reasons)
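A simplified sketch (hypothetical helper names) of the round trip V3 performs above: constraints are serialized as sorted strings into the JSON header and parsed back into a set, so validity reduces to set equality rather than comparing file hashes:

import json

def to_header_attrs(constraints: set[str]) -> str:
    # Sorted stringification keeps regenerated headers deterministic.
    return json.dumps({"requirement_constraints": sorted(constraints)})

def parse_header_attrs(raw: str) -> set[str]:
    return set(json.loads(raw)["requirement_constraints"])

stored = to_header_attrs({"ansicolors==1.1.7"})
# CONSTRAINTS_FILE_MISMATCH corresponds to this inequality:
assert parse_header_attrs(stored) != {"ansicolors==1.1.8"}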
src/python/pants/backend/python/util_rules/lockfile_metadata_test.py
@@ -32,7 +32,7 @@ def test_metadata_header_round_trip() -> None:
["CPython==2.7.*", "PyPy", "CPython>=3.6,<4,!=3.7.*"]
),
requirements=reqset("ansicolors==0.1.0"),
constraints_file_hash="abc",
requirement_constraints={PipRequirement.parse("constraint")},
)
serialized_lockfile = input_metadata.add_header_to_lockfile(
b"req1==1.0", regenerate_command="./pants lock", delimeter="#"
@@ -62,7 +62,7 @@ def test_add_header_to_lockfile() -> None:
# "generated_with_requirements": [
# "ansicolors==0.1.0"
# ],
# "constraints_file_hash": null
# "requirement_constraints": []
# }
# --- END PANTS LOCKFILE METADATA ---
dave==3.1.4 \\
@@ -75,7 +75,7 @@ def line_by_line(b: bytes) -> list[bytes]:
metadata = PythonLockfileMetadata.new(
valid_for_interpreter_constraints=InterpreterConstraints([">=3.7"]),
requirements=reqset("ansicolors==0.1.0"),
-        constraints_file_hash=None,
+        requirement_constraints=set(),
)
result = metadata.add_header_to_lockfile(
input_lockfile, regenerate_command="./pants lock", delimeter="#"
@@ -158,7 +158,7 @@ def test_is_valid_for_v1(user_digest, expected_digest, user_ic, expected_ic, mat
user_interpreter_constraints=InterpreterConstraints(user_ic),
interpreter_universe=INTERPRETER_UNIVERSE,
user_requirements=set(),
-            constraints_file_path_and_hash=None,
+            requirement_constraints=set(),
)
)
== matches
@@ -232,7 +232,7 @@ def test_is_valid_for_interpreter_constraints_and_requirements(
for m in [
PythonLockfileMetadataV2(InterpreterConstraints(lock_ics), reqset(*lock_reqs)),
PythonLockfileMetadataV3(
-            InterpreterConstraints(lock_ics), reqset(*lock_reqs), constraints_file_hash=None
+            InterpreterConstraints(lock_ics), reqset(*lock_reqs), requirement_constraints=set()
),
]:
result = m.is_valid_for(
@@ -241,21 +241,21 @@
user_interpreter_constraints=InterpreterConstraints(user_ics),
interpreter_universe=INTERPRETER_UNIVERSE,
user_requirements=reqset(*user_reqs),
-            constraints_file_path_and_hash=None,
+            requirement_constraints=set(),
)
assert result.failure_reasons == set(expected)


@pytest.mark.parametrize("is_tool", [True, False])
-def test_is_valid_for_constraints_file_hash(is_tool: bool) -> None:
+def test_is_valid_for_requirement_constraints(is_tool: bool) -> None:
result = PythonLockfileMetadataV3(
-        InterpreterConstraints([]), reqset(), constraints_file_hash="abc"
+        InterpreterConstraints([]), reqset(), requirement_constraints={PipRequirement.parse("c1")}
).is_valid_for(
is_tool=is_tool,
expected_invalidation_digest="",
user_interpreter_constraints=InterpreterConstraints([]),
interpreter_universe=INTERPRETER_UNIVERSE,
user_requirements=reqset(),
constraints_file_path_and_hash=("c.txt", "xyz"),
requirement_constraints={PipRequirement.parse("c2")},
)
assert result.failure_reasons == {InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH}
