Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add [python].resolves_to_constraints_file #16420

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 54 additions & 16 deletions src/python/pants/backend/python/goals/lockfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,11 @@
from pants.backend.python.util_rules.lockfile_metadata import PythonLockfileMetadata
from pants.backend.python.util_rules.pex import PexRequest, VenvPex, VenvPexProcess
from pants.backend.python.util_rules.pex_cli import PexCliProcess
from pants.backend.python.util_rules.pex_requirements import PexRequirements
from pants.backend.python.util_rules.pex_requirements import (
PexRequirements,
ResolvePexConfig,
ResolvePexConfigRequest,
)
from pants.core.goals.generate_lockfiles import (
GenerateLockfile,
GenerateLockfileResult,
Expand All @@ -40,9 +44,10 @@
WrappedGenerateLockfile,
)
from pants.core.util_rules.lockfile_metadata import calculate_invalidation_digest
from pants.engine.fs import CreateDigest, Digest, DigestContents, FileContent
from pants.engine.fs import CreateDigest, Digest, DigestContents, FileContent, MergeDigests
from pants.engine.internals.native_engine import FileDigest
from pants.engine.process import ProcessCacheScope, ProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.rules import Get, MultiGet, collect_rules, rule, rule_helper
from pants.engine.target import AllTargets
from pants.engine.unions import UnionRule
from pants.util.docutil import bin_name
Expand Down Expand Up @@ -147,6 +152,37 @@ def warn_python_repos(option: str) -> None:
return MaybeWarnPythonRepos()


@rule_helper
async def _setup_pip_args_and_constraints_file(
    python_setup: PythonSetup, *, resolve_name: str
) -> tuple[list[str], Digest, FileDigest | None]:
    """Assemble extra Pex CLI args and sandbox inputs for lockfile generation.

    Returns a tuple of:
      - extra command-line args (a `-r <pip args file>` pair and/or a
        `--constraints=<path>` flag),
      - a merged digest of any files those args reference, to be added to the
        process sandbox,
      - the constraints file's content digest, or None when no constraints file
        is configured (callers record its fingerprint in the lockfile header so
        constraint changes invalidate the lockfile).
    """
    extra_args: list[str] = []
    extra_digests: list[Digest] = []
    constraints_file_digest: FileDigest | None = None

    if python_setup.no_binary or python_setup.only_binary:
        pip_args_file = "__pip_args.txt"
        extra_args.extend(["-r", pip_args_file])
        # Sort the package names so that reordering the `[python].no_binary` /
        # `[python].only_binary` options does not change the generated file:
        # these flags are not order-sensitive, and a stable file avoids
        # needless downstream cache invalidation.
        pip_args_file_content = "\n".join(
            [f"--no-binary {pkg}" for pkg in sorted(python_setup.no_binary)]
            + [f"--only-binary {pkg}" for pkg in sorted(python_setup.only_binary)]
        )
        pip_args_digest = await Get(
            Digest, CreateDigest([FileContent(pip_args_file, pip_args_file_content.encode())])
        )
        extra_digests.append(pip_args_digest)

    resolve_config = await Get(ResolvePexConfig, ResolvePexConfigRequest(resolve_name))
    if resolve_config.constraints_file:
        # `constraints_file` appears to be a (Digest, entry) pair: index 0 holds
        # the file for the sandbox; index 1 carries the path and file digest.
        _constraints_file_entry = resolve_config.constraints_file[1]
        extra_args.append(f"--constraints={_constraints_file_entry.path}")
        constraints_file_digest = _constraints_file_entry.file_digest
        extra_digests.append(resolve_config.constraints_file[0])

    input_digest = await Get(Digest, MergeDigests(extra_digests))
    return extra_args, input_digest, constraints_file_digest


@rule(desc="Generate Python lockfile", level=LogLevel.DEBUG)
async def generate_lockfile(
req: GeneratePythonLockfile,
Expand All @@ -155,15 +191,17 @@ async def generate_lockfile(
python_repos: PythonRepos,
python_setup: PythonSetup,
) -> GenerateLockfileResult:
constraints_file_hash: str | None = None

if req.use_pex:
pip_args_file = "__pip_args.txt"
pip_args_file_content = "\n".join(
[f"--no-binary {pkg}" for pkg in python_setup.no_binary]
+ [f"--only-binary {pkg}" for pkg in python_setup.only_binary]
)
pip_args_file_digest = await Get(
Digest, CreateDigest([FileContent(pip_args_file, pip_args_file_content.encode())])
)
(
extra_args,
input_digest,
constraints_file_digest,
) = await _setup_pip_args_and_constraints_file(python_setup, resolve_name=req.resolve_name)
if constraints_file_digest:
constraints_file_hash = constraints_file_digest.fingerprint

header_delimiter = "//"
result = await Get(
ProcessResult,
Expand Down Expand Up @@ -192,14 +230,13 @@ async def generate_lockfile(
"mac",
# This makes diffs more readable when lockfiles change.
"--indent=2",
"-r",
pip_args_file,
*extra_args,
*python_repos.pex_args,
*python_setup.manylinux_pex_args,
*req.interpreter_constraints.generate_pex_arg_list(),
*req.requirements,
),
additional_input_digest=pip_args_file_digest,
additional_input_digest=input_digest,
output_files=("lock.json",),
description=f"Generate lockfile for {req.resolve_name}",
# Instead of caching lockfile generation with LMDB, we instead use the invalidation
Expand Down Expand Up @@ -264,8 +301,9 @@ async def generate_lockfile(
initial_lockfile_digest_contents = await Get(DigestContents, Digest, result.output_digest)
# TODO(#12314) Improve error message on `Requirement.parse`
metadata = PythonLockfileMetadata.new(
req.interpreter_constraints,
{PipRequirement.parse(i) for i in req.requirements},
valid_for_interpreter_constraints=req.interpreter_constraints,
requirements={PipRequirement.parse(i) for i in req.requirements},
constraints_file_hash=constraints_file_hash,
)
lockfile_with_header = metadata.add_header_to_lockfile(
initial_lockfile_digest_contents[0].content,
Expand Down
116 changes: 76 additions & 40 deletions src/python/pants/backend/python/goals/lockfile_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,31 @@
from pants.util.strutil import strip_prefix


def _generate(*, rule_runner: RuleRunner, use_pex: bool) -> str:
@pytest.fixture
def rule_runner() -> RuleRunner:
    """Provide a RuleRunner wired with the lockfile-generation rules under test."""
    all_rules = [
        *lockfile_rules(),
        *pex.rules(),
        QueryRule(GenerateLockfileResult, [GeneratePythonLockfile]),
    ]
    runner = RuleRunner(rules=all_rules)
    runner.set_options([], env_inherit=PYTHON_BOOTSTRAP_ENV)
    return runner


def _generate(
*,
rule_runner: RuleRunner,
use_pex: bool,
ansicolors_version: str = "==1.1.8",
constraints_file_hash: str | None = None,
) -> str:
result = rule_runner.request(
GenerateLockfileResult,
[
GeneratePythonLockfile(
requirements=FrozenOrderedSet(["ansicolors==1.1.8"]),
requirements=FrozenOrderedSet([f"ansicolors{ansicolors_version}"]),
interpreter_constraints=InterpreterConstraints(),
resolve_name="test",
lockfile_dest="test.lock",
Expand All @@ -41,19 +60,34 @@ def _generate(*, rule_runner: RuleRunner, use_pex: bool) -> str:
)
digest_contents = rule_runner.request(DigestContents, [result.digest])
assert len(digest_contents) == 1
return digest_contents[0].content.decode()
content = digest_contents[0].content.decode()
if not use_pex:
return content


def test_poetry_lockfile_generation() -> None:
rule_runner = RuleRunner(
rules=[
*lockfile_rules(),
*pex.rules(),
QueryRule(GenerateLockfileResult, [GeneratePythonLockfile]),
]
constraints_file_hash_str = f'"{constraints_file_hash}"' if constraints_file_hash else "null"
pex_header = dedent(
f"""\
// This lockfile was autogenerated by Pants. To regenerate, run:
//
// ./pants generate-lockfiles --resolve=test
//
// --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
// {{
// "version": 3,
// "valid_for_interpreter_constraints": [],
Eric-Arellano marked this conversation as resolved.
Show resolved Hide resolved
// "generated_with_requirements": [
// "ansicolors{ansicolors_version}"
// ],
// "constraints_file_hash": {constraints_file_hash_str}
// }}
// --- END PANTS LOCKFILE METADATA ---
"""
)
rule_runner.set_options([], env_inherit=PYTHON_BOOTSTRAP_ENV)
assert content.startswith(pex_header)
return strip_prefix(content, pex_header)


def test_poetry_lockfile_generation(rule_runner: RuleRunner) -> None:
poetry_lock = _generate(rule_runner=rule_runner, use_pex=False)
assert poetry_lock.startswith("# This lockfile was autogenerated by Pants.")
assert poetry_lock.rstrip().endswith(
Expand All @@ -69,41 +103,17 @@ def test_poetry_lockfile_generation() -> None:
@pytest.mark.parametrize(
("no_binary", "only_binary"), ((False, False), (False, True), (True, False))
)
def test_pex_lockfile_generation(no_binary: bool, only_binary: bool) -> None:
rule_runner = RuleRunner(
rules=[
*lockfile_rules(),
*pex.rules(),
QueryRule(GenerateLockfileResult, [GeneratePythonLockfile]),
]
)
def test_pex_lockfile_generation(
rule_runner: RuleRunner, no_binary: bool, only_binary: bool
) -> None:
args = []
if no_binary:
args.append("--python-no-binary=ansicolors")
if only_binary:
args.append("--python-only-binary=ansicolors")
rule_runner.set_options(args, env_inherit=PYTHON_BOOTSTRAP_ENV)

pex_header = dedent(
"""\
// This lockfile was autogenerated by Pants. To regenerate, run:
//
// ./pants generate-lockfiles --resolve=test
//
// --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
// {
// "version": 2,
// "valid_for_interpreter_constraints": [],
// "generated_with_requirements": [
// "ansicolors==1.1.8"
// ]
// }
// --- END PANTS LOCKFILE METADATA ---
"""
)
pex_lock = _generate(rule_runner=rule_runner, use_pex=True)
assert pex_lock.startswith(pex_header)
lock_entry = json.loads(strip_prefix(pex_lock, pex_header))
lock_entry = json.loads(_generate(rule_runner=rule_runner, use_pex=True))
reqs = lock_entry["locked_resolves"][0]["locked_requirements"]
assert len(reqs) == 1
assert reqs[0]["project_name"] == "ansicolors"
Expand Down Expand Up @@ -142,6 +152,32 @@ def test_pex_lockfile_generation(no_binary: bool, only_binary: bool) -> None:
assert artifacts == [wheel]


def test_constraints_file(rule_runner: RuleRunner) -> None:
    """A configured constraints file should pin versions in the generated Pex lock."""
    rule_runner.write_files({"constraints.txt": "ansicolors==1.1.7"})
    opts = [
        "--python-resolves={'test': 'foo.lock'}",
        "--python-resolves-to-constraints-file={'test': 'constraints.txt'}",
    ]
    rule_runner.set_options(opts, env_inherit=PYTHON_BOOTSTRAP_ENV)

    raw_lock = _generate(
        rule_runner=rule_runner,
        use_pex=True,
        ansicolors_version=">=1.0",
        constraints_file_hash=(
            "1999760ce9dd0f82847def308992e3345592fc9e77a937c1e9bbb78a42ae3943"
        ),
    )
    lock_entry = json.loads(raw_lock)

    locked_requirements = lock_entry["locked_resolves"][0]["locked_requirements"]
    assert len(locked_requirements) == 1
    assert locked_requirements[0]["project_name"] == "ansicolors"
    # The constraint (==1.1.7) wins over the open requirement (>=1.0).
    assert locked_requirements[0]["version"] == "1.1.7"


def test_multiple_resolves() -> None:
rule_runner = RuleRunner(
rules=[
Expand Down
62 changes: 50 additions & 12 deletions src/python/pants/backend/python/subsystems/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import os
from typing import Iterable, Iterator, Optional, cast

from pants.core.goals.generate_lockfiles import UnrecognizedResolveNamesError
from pants.option.option_types import (
BoolOption,
DictOption,
Expand All @@ -18,7 +19,7 @@
)
from pants.option.subsystem import Subsystem
from pants.util.docutil import bin_name, doc_url
from pants.util.memo import memoized_property
from pants.util.memo import memoized_method, memoized_property
from pants.util.strutil import softwrap

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -204,7 +205,32 @@ class PythonSetup(Subsystem):
using a resolve whose interpreter constraints are set to ['==3.7.*'], then
Pants will error explaining the incompatibility.

The keys must be defined as resolves in `[python].resolves`.
The keys must be defined as resolves in `[python].resolves`. To change the interpreter
constraints for tool lockfiles, change `[tool].interpreter_constraints`, e.g.
`[black].interpreter_constraints`; if the tool does not have that option, it determines
its interpreter constraints from your user code.
"""
),
advanced=True,
)
_resolves_to_constraints_file = DictOption[str](
help=softwrap(
"""
When generating a resolve's lockfile, use a constraints file to pin the version of
certain requirements. This is particularly useful to pin the versions of transitive
dependencies of your direct requirements.

See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more information on
the format of constraint files and how constraints are applied in Pex and pip.

Expects a dictionary of resolve names from `[python].resolves` and Python tools (e.g.
`black` and `pytest`) to file paths for
constraints files. For example,
`{'data-science': '3rdparty/data-science-constraints.txt'}`.
If a resolve is not set in the dictionary, it will not use a constraints file.

Note: Only takes effect if you use Pex lockfiles. Use the default
`[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
"""
),
advanced=True,
Expand Down Expand Up @@ -491,21 +517,33 @@ def generate_lockfiles_with_pex(self) -> bool:
@memoized_property
def resolves_to_interpreter_constraints(self) -> dict[str, tuple[str, ...]]:
    """Validate and normalize `[python].resolves_to_interpreter_constraints`.

    Every key must be a resolve declared in `[python].resolves`. All unknown
    keys are collected and reported in one UnrecognizedResolveNamesError,
    rather than failing on just the first bad name.
    """
    unrecognized = [
        resolve
        for resolve in self._resolves_to_interpreter_constraints
        if resolve not in self.resolves
    ]
    if unrecognized:
        raise UnrecognizedResolveNamesError(
            unrecognized,
            self.resolves.keys(),
            description_of_origin="the option `[python].resolves_to_interpreter_constraints`",
        )
    return {
        resolve: tuple(ics)
        for resolve, ics in self._resolves_to_interpreter_constraints.items()
    }

@memoized_method
def resolves_to_constraints_file(
    self, all_tool_resolve_names: tuple[str, ...]
) -> dict[str, str]:
    """Validate `[python].resolves_to_constraints_file` against known resolve names.

    `all_tool_resolve_names` supplies the tool lockfile names (e.g. `black`),
    which are valid keys in addition to the user resolves in `[python].resolves`.
    Raises UnrecognizedResolveNamesError listing every unknown key.
    """
    known_resolves = {*self.resolves, *all_tool_resolve_names}
    unknown = {name for name in self._resolves_to_constraints_file if name not in known_resolves}
    if unknown:
        raise UnrecognizedResolveNamesError(
            sorted(unknown),
            known_resolves,
            description_of_origin="the option `[python].resolves_to_constraints_file`",
        )
    return self._resolves_to_constraints_file

def resolve_all_constraints_was_set_explicitly(self) -> bool:
return not self.options.is_default("resolve_all_constraints")

Expand Down
3 changes: 2 additions & 1 deletion src/python/pants/backend/python/subsystems/setup_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import pytest

from pants.backend.python.subsystems.setup import PythonSetup
from pants.core.goals.generate_lockfiles import UnrecognizedResolveNamesError
from pants.testutil.option_util import create_subsystem


Expand All @@ -18,5 +19,5 @@ def create(resolves_to_ics: dict[str, list[str]]) -> dict[str, tuple[str, ...]]:
).resolves_to_interpreter_constraints

assert create({"a": ["==3.7.*"]}) == {"a": ("==3.7.*",)}
with pytest.raises(KeyError):
with pytest.raises(UnrecognizedResolveNamesError):
create({"fake": []})
Loading