From 3fc6a9e723d98fea0d9f23bc9368f0498f7732c9 Mon Sep 17 00:00:00 2001 From: Eric Arellano Date: Thu, 6 May 2021 00:48:52 -0700 Subject: [PATCH] Rename `PexInterpreterConstraints` to `InterpreterConstraints` and move to dedicated file --- .../pants/backend/awslambda/python/rules.py | 4 +- .../backend/codegen/protobuf/python/rules.py | 4 +- .../dependency_inference/import_parser.py | 6 +- .../import_parser_test.py | 4 +- .../python/dependency_inference/rules.py | 4 +- .../pants/backend/python/goals/coverage_py.py | 11 +- .../backend/python/goals/pytest_runner.py | 6 +- .../pants/backend/python/goals/setup_py.py | 13 +- .../pants/backend/python/lint/bandit/rules.py | 13 +- .../pants/backend/python/lint/black/rules.py | 13 +- .../backend/python/lint/docformatter/rules.py | 11 +- .../pants/backend/python/lint/flake8/rules.py | 13 +- .../pants/backend/python/lint/isort/rules.py | 4 +- .../pants/backend/python/lint/pylint/rules.py | 8 +- .../py_constraints.py | 10 +- .../backend/python/typecheck/mypy/rules.py | 10 +- .../util_rules/interpreter_constraints.py | 240 ++++++++++++++++ .../interpreter_constraints_test.py | 265 +++++++++++++++++ .../pants/backend/python/util_rules/pex.py | 242 +--------------- .../python/util_rules/pex_from_targets.py | 12 +- .../backend/python/util_rules/pex_test.py | 266 +----------------- src/python/pants/init/plugin_resolver.py | 17 +- .../pants_test/init/test_plugin_resolver.py | 17 +- 23 files changed, 590 insertions(+), 603 deletions(-) create mode 100644 src/python/pants/backend/python/util_rules/interpreter_constraints.py create mode 100644 src/python/pants/backend/python/util_rules/interpreter_constraints_test.py diff --git a/src/python/pants/backend/awslambda/python/rules.py b/src/python/pants/backend/awslambda/python/rules.py index 153d928d679..9de7c479281 100644 --- a/src/python/pants/backend/awslambda/python/rules.py +++ b/src/python/pants/backend/awslambda/python/rules.py @@ -12,8 +12,8 @@ ResolvePythonAwsHandlerRequest, ) from pants.backend.python.util_rules import pex_from_targets +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, PexPlatforms, PexRequest, PexRequirements, @@ -96,7 +96,7 @@ async def package_python_awslambda( output_filename="lambdex.pex", internal_only=True, requirements=PexRequirements(lambdex.all_requirements), - interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints), + interpreter_constraints=InterpreterConstraints(lambdex.interpreter_constraints), main=lambdex.main, ) diff --git a/src/python/pants/backend/codegen/protobuf/python/rules.py b/src/python/pants/backend/codegen/protobuf/python/rules.py index 46c5e11060e..8d0a0f4ebe4 100644 --- a/src/python/pants/backend/codegen/protobuf/python/rules.py +++ b/src/python/pants/backend/codegen/protobuf/python/rules.py @@ -13,8 +13,8 @@ from pants.backend.codegen.protobuf.target_types import ProtobufGrpcToggle, ProtobufSources from pants.backend.python.target_types import PythonSources from pants.backend.python.util_rules import pex +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, PexRequest, PexRequirements, PexResolveInfo, @@ -109,7 +109,7 @@ async def generate_python_from_protobuf( output_filename="mypy_protobuf.pex", internal_only=True, requirements=PexRequirements([python_protobuf_mypy_plugin.requirement]), - 
interpreter_constraints=PexInterpreterConstraints( + interpreter_constraints=InterpreterConstraints( python_protobuf_mypy_plugin.interpreter_constraints ), ) diff --git a/src/python/pants/backend/python/dependency_inference/import_parser.py b/src/python/pants/backend/python/dependency_inference/import_parser.py index 12f4528c8cd..b25da5ad63f 100644 --- a/src/python/pants/backend/python/dependency_inference/import_parser.py +++ b/src/python/pants/backend/python/dependency_inference/import_parser.py @@ -3,7 +3,7 @@ from dataclasses import dataclass -from pants.backend.python.util_rules.pex import PexInterpreterConstraints +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex_environment import PythonExecutable from pants.core.util_rules.source_files import SourceFilesRequest from pants.core.util_rules.stripped_source_files import StrippedSourceFiles @@ -143,14 +143,14 @@ class ParsedPythonImports(DeduplicatedCollection[str]): @dataclass(frozen=True) class ParsePythonImportsRequest: sources: Sources - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints string_imports: bool @rule async def parse_python_imports(request: ParsePythonImportsRequest) -> ParsedPythonImports: python_interpreter, script_digest, stripped_sources = await MultiGet( - Get(PythonExecutable, PexInterpreterConstraints, request.interpreter_constraints), + Get(PythonExecutable, InterpreterConstraints, request.interpreter_constraints), Get(Digest, CreateDigest([FileContent("__parse_python_imports.py", _SCRIPT.encode())])), Get(StrippedSourceFiles, SourceFilesRequest([request.sources])), ) diff --git a/src/python/pants/backend/python/dependency_inference/import_parser_test.py b/src/python/pants/backend/python/dependency_inference/import_parser_test.py index ca1985f8f4a..11a8e3ffa41 100644 --- a/src/python/pants/backend/python/dependency_inference/import_parser_test.py +++ b/src/python/pants/backend/python/dependency_inference/import_parser_test.py @@ -14,7 +14,7 @@ ) from pants.backend.python.target_types import PythonLibrary, PythonSources from pants.backend.python.util_rules import pex -from pants.backend.python.util_rules.pex import PexInterpreterConstraints +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.core.util_rules import stripped_source_files from pants.engine.addresses import Address from pants.testutil.python_interpreter_selection import ( @@ -58,7 +58,7 @@ def assert_imports_parsed( [ ParsePythonImportsRequest( tgt[PythonSources], - PexInterpreterConstraints([constraints]), + InterpreterConstraints([constraints]), string_imports=string_imports, ) ], diff --git a/src/python/pants/backend/python/dependency_inference/rules.py b/src/python/pants/backend/python/dependency_inference/rules.py index b1f9ac7c157..a7d0a2f266a 100644 --- a/src/python/pants/backend/python/dependency_inference/rules.py +++ b/src/python/pants/backend/python/dependency_inference/rules.py @@ -15,7 +15,7 @@ from pants.backend.python.target_types import PythonSources, PythonTestsSources from pants.backend.python.util_rules import ancestor_files, pex from pants.backend.python.util_rules.ancestor_files import AncestorFiles, AncestorFilesRequest -from pants.backend.python.util_rules.pex import PexInterpreterConstraints +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.core.util_rules import stripped_source_files from 
pants.engine.addresses import Address from pants.engine.internals.graph import Owners, OwnersRequest @@ -138,7 +138,7 @@ async def infer_python_dependencies_via_imports( ParsedPythonImports, ParsePythonImportsRequest( request.sources_field, - PexInterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup), + InterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup), string_imports=python_infer_subsystem.string_imports, ), ), diff --git a/src/python/pants/backend/python/goals/coverage_py.py b/src/python/pants/backend/python/goals/coverage_py.py index 51750238679..737da9378f2 100644 --- a/src/python/pants/backend/python/goals/coverage_py.py +++ b/src/python/pants/backend/python/goals/coverage_py.py @@ -14,13 +14,8 @@ from pants.backend.python.subsystems.python_tool_base import PythonToolBase from pants.backend.python.target_types import ConsoleScript -from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, - PexRequest, - PexRequirements, - VenvPex, - VenvPexProcess, -) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess from pants.backend.python.util_rules.python_sources import ( PythonSourceFiles, PythonSourceFilesRequest, @@ -306,7 +301,7 @@ async def setup_coverage(coverage: CoverageSubsystem) -> CoverageSetup: output_filename="coverage.pex", internal_only=True, requirements=PexRequirements(coverage.all_requirements), - interpreter_constraints=PexInterpreterConstraints(coverage.interpreter_constraints), + interpreter_constraints=InterpreterConstraints(coverage.interpreter_constraints), main=coverage.main, ), ) diff --git a/src/python/pants/backend/python/goals/pytest_runner.py b/src/python/pants/backend/python/goals/pytest_runner.py index 7b2245fd7d1..4a7f80cf6c2 100644 --- a/src/python/pants/backend/python/goals/pytest_runner.py +++ b/src/python/pants/backend/python/goals/pytest_runner.py @@ -14,9 +14,9 @@ ) from pants.backend.python.subsystems.pytest import PyTest from pants.backend.python.target_types import ConsoleScript, PythonTestsSources, PythonTestsTimeout +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( Pex, - PexInterpreterConstraints, PexRequest, PexRequirements, VenvPex, @@ -120,9 +120,7 @@ async def setup_pytest_for_target( ) all_targets = transitive_targets.closure - interpreter_constraints = PexInterpreterConstraints.create_from_targets( - all_targets, python_setup - ) + interpreter_constraints = InterpreterConstraints.create_from_targets(all_targets, python_setup) requirements_pex_get = Get( Pex, diff --git a/src/python/pants/backend/python/goals/setup_py.py b/src/python/pants/backend/python/goals/setup_py.py index 5d14329e584..10055838c12 100644 --- a/src/python/pants/backend/python/goals/setup_py.py +++ b/src/python/pants/backend/python/goals/setup_py.py @@ -23,13 +23,8 @@ ResolvePexEntryPointRequest, SetupPyCommandsField, ) -from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, - PexRequest, - PexRequirements, - VenvPex, - VenvPexProcess, -) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess from pants.backend.python.util_rules.python_sources import ( PythonSourceFilesRequest, StrippedPythonSourceFiles, @@ -290,7 
+285,7 @@ class RunSetupPyRequest: """A request to run a setup.py command.""" exported_target: ExportedTarget - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints chroot: SetupPyChroot args: Tuple[str, ...] @@ -365,7 +360,7 @@ async def package_python_dist( ) -> BuiltPackage: transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])) exported_target = ExportedTarget(transitive_targets.roots[0]) - interpreter_constraints = PexInterpreterConstraints.create_from_targets( + interpreter_constraints = InterpreterConstraints.create_from_targets( transitive_targets.closure, python_setup ) chroot = await Get( diff --git a/src/python/pants/backend/python/lint/bandit/rules.py b/src/python/pants/backend/python/lint/bandit/rules.py index 447d6aac781..a600a49669f 100644 --- a/src/python/pants/backend/python/lint/bandit/rules.py +++ b/src/python/pants/backend/python/lint/bandit/rules.py @@ -8,13 +8,8 @@ from pants.backend.python.lint.bandit.subsystem import Bandit from pants.backend.python.target_types import InterpreterConstraintsField, PythonSources from pants.backend.python.util_rules import pex -from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, - PexRequest, - PexRequirements, - VenvPex, - VenvPexProcess, -) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess from pants.core.goals.lint import LintReport, LintRequest, LintResult, LintResults, LintSubsystem from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest @@ -47,7 +42,7 @@ class BanditRequest(LintRequest): @dataclass(frozen=True) class BanditPartition: field_sets: Tuple[BanditFieldSet, ...] - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints def generate_argv( @@ -138,7 +133,7 @@ async def bandit_lint( # ( https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit). We # batch targets by their constraints to ensure, for example, that all Python 2 targets run # together and all Python 3 targets run together. 
- constraints_to_field_sets = PexInterpreterConstraints.group_field_sets_by_constraints( + constraints_to_field_sets = InterpreterConstraints.group_field_sets_by_constraints( request.field_sets, python_setup ) partitioned_results = await MultiGet( diff --git a/src/python/pants/backend/python/lint/black/rules.py b/src/python/pants/backend/python/lint/black/rules.py index 7b23baf09d7..a6966870aba 100644 --- a/src/python/pants/backend/python/lint/black/rules.py +++ b/src/python/pants/backend/python/lint/black/rules.py @@ -9,13 +9,8 @@ from pants.backend.python.lint.python_fmt import PythonFmtRequest from pants.backend.python.target_types import InterpreterConstraintsField, PythonSources from pants.backend.python.util_rules import pex -from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, - PexRequest, - PexRequirements, - VenvPex, - VenvPexProcess, -) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess from pants.core.goals.fmt import FmtResult from pants.core.goals.lint import LintRequest, LintResult, LintResults from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest @@ -78,7 +73,7 @@ async def setup_black( # when relevant. We only do this if if <3.8 can't be used, as we don't want a loose requirement # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on # the machine. - all_interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields( + all_interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields( (field_set.interpreter_constraints for field_set in setup_request.request.field_sets), python_setup, ) @@ -88,7 +83,7 @@ async def setup_black( all_interpreter_constraints.requires_python38_or_newer() and black.options.is_default("interpreter_constraints") ) - else PexInterpreterConstraints(black.interpreter_constraints) + else InterpreterConstraints(black.interpreter_constraints) ) black_pex_get = Get( diff --git a/src/python/pants/backend/python/lint/docformatter/rules.py b/src/python/pants/backend/python/lint/docformatter/rules.py index eee5e4e44a5..d280ed53971 100644 --- a/src/python/pants/backend/python/lint/docformatter/rules.py +++ b/src/python/pants/backend/python/lint/docformatter/rules.py @@ -9,13 +9,8 @@ from pants.backend.python.lint.python_fmt import PythonFmtRequest from pants.backend.python.target_types import PythonSources from pants.backend.python.util_rules import pex -from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, - PexRequest, - PexRequirements, - VenvPex, - VenvPexProcess, -) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess from pants.core.goals.fmt import FmtResult from pants.core.goals.lint import LintRequest, LintResult, LintResults from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest @@ -69,7 +64,7 @@ async def setup_docformatter(setup_request: SetupRequest, docformatter: Docforma output_filename="docformatter.pex", internal_only=True, requirements=PexRequirements(docformatter.all_requirements), - interpreter_constraints=PexInterpreterConstraints(docformatter.interpreter_constraints), + interpreter_constraints=InterpreterConstraints(docformatter.interpreter_constraints), main=docformatter.main, ), 
) diff --git a/src/python/pants/backend/python/lint/flake8/rules.py b/src/python/pants/backend/python/lint/flake8/rules.py index 488191de4f2..b85c14ed2ab 100644 --- a/src/python/pants/backend/python/lint/flake8/rules.py +++ b/src/python/pants/backend/python/lint/flake8/rules.py @@ -8,13 +8,8 @@ from pants.backend.python.lint.flake8.subsystem import Flake8 from pants.backend.python.target_types import InterpreterConstraintsField, PythonSources from pants.backend.python.util_rules import pex -from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, - PexRequest, - PexRequirements, - VenvPex, - VenvPexProcess, -) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess from pants.core.goals.lint import LintReport, LintRequest, LintResult, LintResults, LintSubsystem from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest @@ -47,7 +42,7 @@ class Flake8Request(LintRequest): @dataclass(frozen=True) class Flake8Partition: field_sets: Tuple[Flake8FieldSet, ...] - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints def generate_argv( @@ -136,7 +131,7 @@ async def flake8_lint( # (http://flake8.pycqa.org/en/latest/user/invocation.html). We batch targets by their # constraints to ensure, for example, that all Python 2 targets run together and all Python 3 # targets run together. - constraints_to_field_sets = PexInterpreterConstraints.group_field_sets_by_constraints( + constraints_to_field_sets = InterpreterConstraints.group_field_sets_by_constraints( request.field_sets, python_setup ) partitioned_results = await MultiGet( diff --git a/src/python/pants/backend/python/lint/isort/rules.py b/src/python/pants/backend/python/lint/isort/rules.py index a28b606933a..2fd12feed63 100644 --- a/src/python/pants/backend/python/lint/isort/rules.py +++ b/src/python/pants/backend/python/lint/isort/rules.py @@ -9,8 +9,8 @@ from pants.backend.python.lint.python_fmt import PythonFmtRequest from pants.backend.python.target_types import PythonSources from pants.backend.python.util_rules import pex +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( - PexInterpreterConstraints, PexRequest, PexRequirements, PexResolveInfo, @@ -90,7 +90,7 @@ async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup: output_filename="isort.pex", internal_only=True, requirements=PexRequirements(isort.all_requirements), - interpreter_constraints=PexInterpreterConstraints(isort.interpreter_constraints), + interpreter_constraints=InterpreterConstraints(isort.interpreter_constraints), main=isort.main, ), ) diff --git a/src/python/pants/backend/python/lint/pylint/rules.py b/src/python/pants/backend/python/lint/pylint/rules.py index 101b8f993ac..eda765400a8 100644 --- a/src/python/pants/backend/python/lint/pylint/rules.py +++ b/src/python/pants/backend/python/lint/pylint/rules.py @@ -13,9 +13,9 @@ PythonSources, ) from pants.backend.python.util_rules import pex_from_targets +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( Pex, - PexInterpreterConstraints, PexRequest, PexRequirements, VenvPex, @@ -73,13 +73,13 @@ class PylintTargetSetup: class 
PylintPartition: field_sets: Tuple[PylintFieldSet, ...] targets_with_dependencies: Targets - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints plugin_targets: Targets def __init__( self, target_setups: Iterable[PylintTargetSetup], - interpreter_constraints: PexInterpreterConstraints, + interpreter_constraints: InterpreterConstraints, plugin_targets: Iterable[Target], ) -> None: field_sets = [] @@ -261,7 +261,7 @@ async def pylint_lint( request.field_sets, linted_targets, per_target_dependencies ): target_setup = PylintTargetSetup(field_set, Targets([tgt, *dependencies])) - interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields( + interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields( ( *( tgt[InterpreterConstraintsField] diff --git a/src/python/pants/backend/python/mixed_interpreter_constraints/py_constraints.py b/src/python/pants/backend/python/mixed_interpreter_constraints/py_constraints.py index 7c316c51f7d..3b1eea29435 100644 --- a/src/python/pants/backend/python/mixed_interpreter_constraints/py_constraints.py +++ b/src/python/pants/backend/python/mixed_interpreter_constraints/py_constraints.py @@ -9,7 +9,7 @@ from pants.backend.project_info.dependees import Dependees, DependeesRequest from pants.backend.python.target_types import InterpreterConstraintsField -from pants.backend.python.util_rules.pex import PexInterpreterConstraints +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.base.specs import AddressSpecs, DescendantAddresses from pants.engine.addresses import Address, Addresses from pants.engine.console import Console @@ -91,7 +91,7 @@ async def py_constraints( ) constraints_per_tgt = [ - PexInterpreterConstraints.create_from_targets([tgt], python_setup) + InterpreterConstraints.create_from_targets([tgt], python_setup) for tgt in all_python_targets ] @@ -100,7 +100,7 @@ async def py_constraints( for tgt in all_python_targets ) transitive_constraints_per_tgt = [ - PexInterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup) + InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup) for transitive_targets in transitive_targets_per_tgt ] @@ -144,7 +144,7 @@ async def py_constraints( return PyConstraintsGoal(exit_code=0) transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(addresses)) - final_constraints = PexInterpreterConstraints.create_from_targets( + final_constraints = InterpreterConstraints.create_from_targets( transitive_targets.closure, python_setup ) @@ -162,7 +162,7 @@ async def py_constraints( constraints_to_addresses = defaultdict(set) for tgt in transitive_targets.closure: - constraints = PexInterpreterConstraints.create_from_targets([tgt], python_setup) + constraints = InterpreterConstraints.create_from_targets([tgt], python_setup) if not constraints: continue constraints_to_addresses[constraints].add(tgt.address) diff --git a/src/python/pants/backend/python/typecheck/mypy/rules.py b/src/python/pants/backend/python/typecheck/mypy/rules.py index dd03428ca48..9e2d7f4b728 100644 --- a/src/python/pants/backend/python/typecheck/mypy/rules.py +++ b/src/python/pants/backend/python/typecheck/mypy/rules.py @@ -11,9 +11,9 @@ from pants.backend.python.typecheck.mypy.skip_field import SkipMyPyField from pants.backend.python.typecheck.mypy.subsystem import MyPy from pants.backend.python.util_rules import pex_from_targets +from 
pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( Pex, - PexInterpreterConstraints, PexRequest, PexRequirements, VenvPex, @@ -57,7 +57,7 @@ def opt_out(cls, tgt: Target) -> bool: class MyPyPartition: root_targets: FrozenOrderedSet[Target] closure: FrozenOrderedSet[Target] - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints python_version_already_configured: bool @@ -158,7 +158,7 @@ async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> Type mypy.options.is_default("interpreter_constraints") and partition.interpreter_constraints.requires_python38_or_newer() ) - else PexInterpreterConstraints(mypy.interpreter_constraints) + else InterpreterConstraints(mypy.interpreter_constraints) ) plugin_sources_get = Get( @@ -313,9 +313,9 @@ async def mypy_typecheck( interpreter_constraints_to_transitive_targets = defaultdict(set) for transitive_targets in transitive_targets_per_field_set: - interpreter_constraints = PexInterpreterConstraints.create_from_targets( + interpreter_constraints = InterpreterConstraints.create_from_targets( transitive_targets.closure, python_setup - ) or PexInterpreterConstraints(mypy.interpreter_constraints) + ) or InterpreterConstraints(mypy.interpreter_constraints) interpreter_constraints_to_transitive_targets[interpreter_constraints].add( transitive_targets ) diff --git a/src/python/pants/backend/python/util_rules/interpreter_constraints.py b/src/python/pants/backend/python/util_rules/interpreter_constraints.py new file mode 100644 index 00000000000..1b8e67d1972 --- /dev/null +++ b/src/python/pants/backend/python/util_rules/interpreter_constraints.py @@ -0,0 +1,240 @@ +# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import annotations + +import functools +import itertools +from collections import defaultdict +from typing import FrozenSet, Iterable, List, Sequence, Set, Tuple, TypeVar + +from pkg_resources import Requirement +from typing_extensions import Protocol + +from pants.backend.python.target_types import InterpreterConstraintsField +from pants.build_graph.address import Address +from pants.engine.engine_aware import EngineAwareParameter +from pants.engine.target import Target +from pants.python.python_setup import PythonSetup +from pants.util.frozendict import FrozenDict +from pants.util.ordered_set import FrozenOrderedSet + + +# This protocol allows us to work with any arbitrary FieldSet. See +# https://mypy.readthedocs.io/en/stable/protocols.html. +class FieldSetWithInterpreterConstraints(Protocol): + @property + def address(self) -> Address: + ... + + @property + def interpreter_constraints(self) -> InterpreterConstraintsField: + ... + + +_FS = TypeVar("_FS", bound=FieldSetWithInterpreterConstraints) + + +# Normally we would subclass `DeduplicatedCollection`, but we want a custom constructor. +class InterpreterConstraints(FrozenOrderedSet[Requirement], EngineAwareParameter): + def __init__(self, constraints: Iterable[str | Requirement] = ()) -> None: + super().__init__( + v if isinstance(v, Requirement) else self.parse_constraint(v) + for v in sorted(constraints, key=lambda c: str(c)) + ) + + @staticmethod + def parse_constraint(constraint: str) -> Requirement: + """Parse an interpreter constraint, e.g., CPython>=2.7,<3. + + We allow shorthand such as `>=3.7`, which gets expanded to `CPython>=3.7`. 
See Pex's + interpreter.py's `parse_requirement()`. + """ + try: + parsed_requirement = Requirement.parse(constraint) + except ValueError: + parsed_requirement = Requirement.parse(f"CPython{constraint}") + return parsed_requirement + + @classmethod + def merge_constraint_sets(cls, constraint_sets: Iterable[Iterable[str]]) -> List[Requirement]: + """Given a collection of constraints sets, merge by ORing within each individual constraint + set and ANDing across each distinct constraint set. + + For example, given `[["CPython>=2.7", "CPython<=3"], ["CPython==3.6.*"]]`, return + `["CPython>=2.7,==3.6.*", "CPython<=3,==3.6.*"]`. + """ + # Each element (a Set[ParsedConstraint]) will get ANDed. We use sets to deduplicate + # identical top-level parsed constraint sets. + if not constraint_sets: + return [] + parsed_constraint_sets: Set[FrozenSet[Requirement]] = set() + for constraint_set in constraint_sets: + # Each element (a ParsedConstraint) will get ORed. + parsed_constraint_set = frozenset( + cls.parse_constraint(constraint) for constraint in constraint_set + ) + parsed_constraint_sets.add(parsed_constraint_set) + + def and_constraints(parsed_constraints: Sequence[Requirement]) -> Requirement: + merged_specs: Set[Tuple[str, str]] = set() + expected_interpreter = parsed_constraints[0].project_name + for parsed_constraint in parsed_constraints: + if parsed_constraint.project_name == expected_interpreter: + merged_specs.update(parsed_constraint.specs) + continue + + def key_fn(req: Requirement): + return req.project_name + + # NB: We must pre-sort the data for itertools.groupby() to work properly. + sorted_constraints = sorted(parsed_constraints, key=key_fn) + attempted_interpreters = { + interp: sorted( + str(parsed_constraint) for parsed_constraint in parsed_constraints + ) + for interp, parsed_constraints in itertools.groupby( + sorted_constraints, key=key_fn + ) + } + raise ValueError( + "Tried ANDing Python interpreter constraints with different interpreter " + "types. Please use only one interpreter type. Got " + f"{attempted_interpreters}." + ) + + formatted_specs = ",".join(f"{op}{version}" for op, version in merged_specs) + return Requirement.parse(f"{expected_interpreter}{formatted_specs}") + + def cmp_constraints(req1: Requirement, req2: Requirement) -> int: + if req1.project_name != req2.project_name: + return -1 if req1.project_name < req2.project_name else 1 + if req1.specs == req2.specs: + return 0 + return -1 if req1.specs < req2.specs else 1 + + return sorted( + { + and_constraints(constraints_product) + for constraints_product in itertools.product(*parsed_constraint_sets) + }, + key=functools.cmp_to_key(cmp_constraints), + ) + + @classmethod + def create_from_targets( + cls, targets: Iterable[Target], python_setup: PythonSetup + ) -> InterpreterConstraints: + return cls.create_from_compatibility_fields( + ( + tgt[InterpreterConstraintsField] + for tgt in targets + if tgt.has_field(InterpreterConstraintsField) + ), + python_setup, + ) + + @classmethod + def create_from_compatibility_fields( + cls, fields: Iterable[InterpreterConstraintsField], python_setup: PythonSetup + ) -> InterpreterConstraints: + constraint_sets = {field.value_or_global_default(python_setup) for field in fields} + # This will OR within each field and AND across fields. 
+ merged_constraints = cls.merge_constraint_sets(constraint_sets) + return InterpreterConstraints(merged_constraints) + + @classmethod + def group_field_sets_by_constraints( + cls, field_sets: Iterable[_FS], python_setup: PythonSetup + ) -> FrozenDict["InterpreterConstraints", Tuple[_FS, ...]]: + results = defaultdict(set) + for fs in field_sets: + constraints = cls.create_from_compatibility_fields( + [fs.interpreter_constraints], python_setup + ) + results[constraints].add(fs) + return FrozenDict( + { + constraints: tuple(sorted(field_sets, key=lambda fs: fs.address)) + for constraints, field_sets in sorted(results.items()) + } + ) + + def generate_pex_arg_list(self) -> List[str]: + args = [] + for constraint in self: + args.extend(["--interpreter-constraint", str(constraint)]) + return args + + def _includes_version(self, major_minor: str, last_patch: int) -> bool: + patch_versions = list(reversed(range(0, last_patch + 1))) + for req in self: + if any( + req.specifier.contains(f"{major_minor}.{p}") for p in patch_versions # type: ignore[attr-defined] + ): + return True + return False + + def includes_python2(self) -> bool: + """Checks if any of the constraints include Python 2. + + This will return True even if the code works with Python 3 too, so long as at least one of + the constraints works with Python 2. + """ + last_py27_patch_version = 18 + return self._includes_version("2.7", last_patch=last_py27_patch_version) + + def minimum_python_version(self) -> str | None: + """Find the lowest major.minor Python version that will work with these constraints. + + The constraints may also be compatible with later versions; this is the lowest version that + still works. + """ + if self.includes_python2(): + return "2.7" + max_expected_py3_patch_version = 15 # The current max is 3.6.12. + for major_minor in ("3.5", "3.6", "3.7", "3.8", "3.9", "3.10"): + if self._includes_version(major_minor, last_patch=max_expected_py3_patch_version): + return major_minor + return None + + def _requires_python3_version_or_newer( + self, *, allowed_versions: Iterable[str], prior_version: str + ) -> bool: + # Assume any 3.x release has no more than 15 releases. The max is currently 3.6.12. + patch_versions = list(reversed(range(0, 15))) + # We only need to look at the prior Python release. For example, consider Python 3.8+ + # looking at 3.7. If using something like `>=3.5`, Py37 will be included. + # `==3.6.*,!=3.7.*,==3.8.*` is extremely unlikely, and even that will work correctly as + # it's an invalid constraint so setuptools returns False always. `['==2.7.*', '==3.8.*']` + # will fail because not every single constraint is exclusively 3.8. + prior_versions = [f"{prior_version}.{p}" for p in patch_versions] + allowed_versions = [ + f"{major_minor}.{p}" for major_minor in allowed_versions for p in patch_versions + ] + for req in self: + if any( + req.specifier.contains(prior) for prior in prior_versions # type: ignore[attr-defined] + ): + return False + if not any( + req.specifier.contains(allowed) for allowed in allowed_versions # type: ignore[attr-defined] + ): + return False + return True + + def requires_python38_or_newer(self) -> bool: + """Checks if the constraints are all for Python 3.8+. + + This will return False if Python 3.8 is allowed, but prior versions like 3.7 are also + allowed. 
+ """ + return self._requires_python3_version_or_newer( + allowed_versions=["3.8", "3.9", "3.10"], prior_version="3.7" + ) + + def __str__(self) -> str: + return " OR ".join(str(constraint) for constraint in self) + + def debug_hint(self) -> str: + return str(self) diff --git a/src/python/pants/backend/python/util_rules/interpreter_constraints_test.py b/src/python/pants/backend/python/util_rules/interpreter_constraints_test.py new file mode 100644 index 00000000000..effc01dfe29 --- /dev/null +++ b/src/python/pants/backend/python/util_rules/interpreter_constraints_test.py @@ -0,0 +1,265 @@ +# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import annotations + +from dataclasses import dataclass +from typing import List + +import pytest +from pkg_resources import Requirement + +from pants.backend.python.target_types import InterpreterConstraintsField +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints +from pants.build_graph.address import Address +from pants.engine.target import FieldSet +from pants.python.python_setup import PythonSetup +from pants.testutil.option_util import create_subsystem +from pants.util.frozendict import FrozenDict + + +@dataclass(frozen=True) +class MockFieldSet(FieldSet): + interpreter_constraints: InterpreterConstraintsField + + @classmethod + def create_for_test(cls, address: Address, compat: str | None) -> MockFieldSet: + return cls( + address=address, + interpreter_constraints=InterpreterConstraintsField( + [compat] if compat else None, address=address + ), + ) + + +def test_merge_interpreter_constraints() -> None: + def assert_merged(*, inp: List[List[str]], expected: List[str]) -> None: + result = sorted(str(req) for req in InterpreterConstraints.merge_constraint_sets(inp)) + # Requirement.parse() sorts specs differently than we'd like, so we convert each str to a + # Requirement. + normalized_expected = sorted(str(Requirement.parse(v)) for v in expected) + assert result == normalized_expected + + # Multiple constraint sets get merged so that they are ANDed. + # A & B => A & B + assert_merged(inp=[["CPython==2.7.*"], ["CPython==3.6.*"]], expected=["CPython==2.7.*,==3.6.*"]) + + # Multiple constraints within a single constraint set are kept separate so that they are ORed. + # A | B => A | B + assert_merged( + inp=[["CPython==2.7.*", "CPython==3.6.*"]], expected=["CPython==2.7.*", "CPython==3.6.*"] + ) + + # Input constraints already were ANDed. + # A => A + assert_merged(inp=[["CPython>=2.7,<3"]], expected=["CPython>=2.7,<3"]) + + # Both AND and OR. 
+ # (A | B) & C => (A & B) | (B & C) + assert_merged( + inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*"]], + expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"], + ) + # A & B & (C | D) => (A & B & C) | (A & B & D) + assert_merged( + inp=[["CPython==2.7.*"], ["CPython==3.6.*"], ["CPython==3.7.*", "CPython==3.8.*"]], + expected=["CPython==2.7.*,==3.6.*,==3.7.*", "CPython==2.7.*,==3.6.*,==3.8.*"], + ) + # (A | B) & (C | D) => (A & C) | (A & D) | (B & C) | (B & D) + assert_merged( + inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*", "CPython==3.7.*"]], + expected=[ + "CPython>=2.7,<3,==3.6.*", + "CPython>=2.7,<3,==3.7.*", + "CPython>=3.5,==3.6.*", + "CPython>=3.5,==3.7.*", + ], + ) + # A & (B | C | D) & (E | F) & G => + # (A & B & E & G) | (A & B & F & G) | (A & C & E & G) | (A & C & F & G) | (A & D & E & G) | (A & D & F & G) + assert_merged( + inp=[ + ["CPython==3.6.5"], + ["CPython==2.7.14", "CPython==2.7.15", "CPython==2.7.16"], + ["CPython>=3.6", "CPython==3.5.10"], + ["CPython>3.8"], + ], + expected=[ + "CPython==2.7.14,==3.5.10,==3.6.5,>3.8", + "CPython==2.7.14,>=3.6,==3.6.5,>3.8", + "CPython==2.7.15,==3.5.10,==3.6.5,>3.8", + "CPython==2.7.15,>=3.6,==3.6.5,>3.8", + "CPython==2.7.16,==3.5.10,==3.6.5,>3.8", + "CPython==2.7.16,>=3.6,==3.6.5,>3.8", + ], + ) + + # Deduplicate between constraint_sets + # (A | B) & (A | B) => A | B. Naively, this should actually resolve as follows: + # (A | B) & (A | B) => (A & A) | (A & B) | (B & B) => A | (A & B) | B. + # But, we first deduplicate each constraint_set. (A | B) & (A | B) can be rewritten as + # X & X => X. + assert_merged( + inp=[["CPython==2.7.*", "CPython==3.6.*"], ["CPython==2.7.*", "CPython==3.6.*"]], + expected=["CPython==2.7.*", "CPython==3.6.*"], + ) + # (A | B) & C & (A | B) => (A & C) | (B & C). Alternatively, this can be rewritten as + # X & Y & X => X & Y. + assert_merged( + inp=[ + ["CPython>=2.7,<3", "CPython>=3.5"], + ["CPython==3.6.*"], + ["CPython>=3.5", "CPython>=2.7,<3"], + ], + expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"], + ) + + # No specifiers + assert_merged(inp=[["CPython"]], expected=["CPython"]) + assert_merged(inp=[["CPython"], ["CPython==3.7.*"]], expected=["CPython==3.7.*"]) + + # No interpreter is shorthand for CPython, which is how Pex behaves + assert_merged(inp=[[">=3.5"], ["CPython==3.7.*"]], expected=["CPython>=3.5,==3.7.*"]) + + # Different Python interpreters, which are guaranteed to fail when ANDed but are safe when ORed. + with pytest.raises(ValueError): + InterpreterConstraints.merge_constraint_sets([["CPython==3.7.*"], ["PyPy==43.0"]]) + assert_merged(inp=[["CPython==3.7.*", "PyPy==43.0"]], expected=["CPython==3.7.*", "PyPy==43.0"]) + + # Ensure we can handle empty input. 
+ assert_merged(inp=[], expected=[]) + + +@pytest.mark.parametrize( + "constraints", + [ + ["CPython>=2.7,<3"], + ["CPython>=2.7,<3", "CPython>=3.6"], + ["CPython>=2.7.13"], + ["CPython>=2.7.13,<2.7.16"], + ["CPython>=2.7.13,!=2.7.16"], + ["PyPy>=2.7,<3"], + ], +) +def test_interpreter_constraints_includes_python2(constraints) -> None: + assert InterpreterConstraints(constraints).includes_python2() is True + + +@pytest.mark.parametrize( + "constraints", + [ + ["CPython>=3.6"], + ["CPython>=3.7"], + ["CPython>=3.6", "CPython>=3.8"], + ["CPython!=2.7.*"], + ["PyPy>=3.6"], + ], +) +def test_interpreter_constraints_do_not_include_python2(constraints): + assert InterpreterConstraints(constraints).includes_python2() is False + + +@pytest.mark.parametrize( + "constraints,expected", + [ + (["CPython>=2.7"], "2.7"), + (["CPython>=3.5"], "3.5"), + (["CPython>=3.6"], "3.6"), + (["CPython>=3.7"], "3.7"), + (["CPython>=3.8"], "3.8"), + (["CPython>=3.9"], "3.9"), + (["CPython>=3.10"], "3.10"), + (["CPython==2.7.10"], "2.7"), + (["CPython==3.5.*", "CPython>=3.6"], "3.5"), + (["CPython==2.6.*"], None), + ], +) +def test_interpreter_constraints_minimum_python_version( + constraints: List[str], expected: str +) -> None: + assert InterpreterConstraints(constraints).minimum_python_version() == expected + + +@pytest.mark.parametrize( + "constraints", + [ + ["CPython==3.8.*"], + ["CPython==3.8.1"], + ["CPython==3.9.1"], + ["CPython>=3.8"], + ["CPython>=3.9"], + ["CPython>=3.10"], + ["CPython==3.8.*", "CPython==3.9.*"], + ["PyPy>=3.8"], + ], +) +def test_interpreter_constraints_require_python38(constraints) -> None: + assert InterpreterConstraints(constraints).requires_python38_or_newer() is True + + +@pytest.mark.parametrize( + "constraints", + [ + ["CPython==3.5.*"], + ["CPython==3.6.*"], + ["CPython==3.7.*"], + ["CPython==3.7.3"], + ["CPython>=3.7"], + ["CPython==3.7.*", "CPython==3.8.*"], + ["CPython==3.5.3", "CPython==3.8.3"], + ["PyPy>=3.7"], + ], +) +def test_interpreter_constraints_do_not_require_python38(constraints): + assert InterpreterConstraints(constraints).requires_python38_or_newer() is False + + +def test_group_field_sets_by_constraints() -> None: + py2_fs = MockFieldSet.create_for_test(Address("", target_name="py2"), ">=2.7,<3") + py3_fs = [ + MockFieldSet.create_for_test(Address("", target_name="py3"), "==3.6.*"), + MockFieldSet.create_for_test(Address("", target_name="py3_second"), "==3.6.*"), + ] + no_constraints_fs = MockFieldSet.create_for_test( + Address("", target_name="no_constraints"), None + ) + assert InterpreterConstraints.group_field_sets_by_constraints( + [py2_fs, *py3_fs, no_constraints_fs], + python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]), + ) == FrozenDict( + { + InterpreterConstraints(): (no_constraints_fs,), + InterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,), + InterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs), + } + ) + + +def test_group_field_sets_by_constraints_with_unsorted_inputs() -> None: + py3_fs = [ + MockFieldSet.create_for_test( + Address("src/python/a_dir/path.py", target_name="test"), "==3.6.*" + ), + MockFieldSet.create_for_test( + Address("src/python/b_dir/path.py", target_name="test"), ">2.7,<3" + ), + MockFieldSet.create_for_test( + Address("src/python/c_dir/path.py", target_name="test"), "==3.6.*" + ), + ] + + ic_36 = InterpreterConstraints([Requirement.parse("CPython==3.6.*")]) + + output = InterpreterConstraints.group_field_sets_by_constraints( + py3_fs, + python_setup=create_subsystem(PythonSetup, 
interpreter_constraints=[]), + ) + + assert output[ic_36] == ( + MockFieldSet.create_for_test( + Address("src/python/a_dir/path.py", target_name="test"), "==3.6.*" + ), + MockFieldSet.create_for_test( + Address("src/python/c_dir/path.py", target_name="test"), "==3.6.*" + ), + ) diff --git a/src/python/pants/backend/python/util_rules/pex.py b/src/python/pants/backend/python/util_rules/pex.py index 2aab0f2b87d..5e6607a6968 100644 --- a/src/python/pants/backend/python/util_rules/pex.py +++ b/src/python/pants/backend/python/util_rules/pex.py @@ -4,36 +4,33 @@ from __future__ import annotations import dataclasses -import functools -import itertools import json import logging import shlex -from collections import defaultdict from dataclasses import dataclass from pathlib import PurePath from textwrap import dedent -from typing import FrozenSet, Iterable, Iterator, List, Mapping, Sequence, Set, Tuple, TypeVar +from typing import Iterable, Iterator, List, Mapping, Tuple import packaging.specifiers import packaging.version from pkg_resources import Requirement -from typing_extensions import Protocol -from pants.backend.python.target_types import InterpreterConstraintsField, MainSpecification +from pants.backend.python.target_types import MainSpecification from pants.backend.python.target_types import PexPlatformsField as PythonPlatformsField from pants.backend.python.target_types import ( PythonRequirementConstraintsField, PythonRequirementsField, ) from pants.backend.python.util_rules import pex_cli +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex_cli import PexCliProcess, PexPEX from pants.backend.python.util_rules.pex_environment import ( PexEnvironment, PexRuntimeEnvironment, PythonExecutable, ) -from pants.engine.addresses import Address, UnparsedAddressInputs +from pants.engine.addresses import UnparsedAddressInputs from pants.engine.collection import Collection, DeduplicatedCollection from pants.engine.engine_aware import EngineAwareParameter from pants.engine.fs import ( @@ -56,13 +53,12 @@ ProcessResult, ) from pants.engine.rules import Get, collect_rules, rule -from pants.engine.target import Target, Targets +from pants.engine.target import Targets from pants.python.python_repos import PythonRepos from pants.python.python_setup import PythonSetup from pants.util.frozendict import FrozenDict from pants.util.logging import LogLevel from pants.util.meta import frozen_after_init -from pants.util.ordered_set import FrozenOrderedSet from pants.util.strutil import pluralize @@ -80,226 +76,6 @@ def create_from_requirement_fields( return PexRequirements({*field_requirements, *additional_requirements}) -# This protocol allows us to work with any arbitrary FieldSet. See -# https://mypy.readthedocs.io/en/stable/protocols.html. -class FieldSetWithInterpreterConstraints(Protocol): - @property - def address(self) -> Address: - ... - - @property - def interpreter_constraints(self) -> InterpreterConstraintsField: - ... - - -_FS = TypeVar("_FS", bound=FieldSetWithInterpreterConstraints) - - -# Normally we would subclass `DeduplicatedCollection`, but we want a custom constructor. 
-class PexInterpreterConstraints(FrozenOrderedSet[Requirement], EngineAwareParameter): - def __init__(self, constraints: Iterable[str | Requirement] = ()) -> None: - super().__init__( - v if isinstance(v, Requirement) else self.parse_constraint(v) - for v in sorted(constraints, key=lambda c: str(c)) - ) - - @staticmethod - def parse_constraint(constraint: str) -> Requirement: - """Parse an interpreter constraint, e.g., CPython>=2.7,<3. - - We allow shorthand such as `>=3.7`, which gets expanded to `CPython>=3.7`. See Pex's - interpreter.py's `parse_requirement()`. - """ - try: - parsed_requirement = Requirement.parse(constraint) - except ValueError: - parsed_requirement = Requirement.parse(f"CPython{constraint}") - return parsed_requirement - - @classmethod - def merge_constraint_sets(cls, constraint_sets: Iterable[Iterable[str]]) -> List[Requirement]: - """Given a collection of constraints sets, merge by ORing within each individual constraint - set and ANDing across each distinct constraint set. - - For example, given `[["CPython>=2.7", "CPython<=3"], ["CPython==3.6.*"]]`, return - `["CPython>=2.7,==3.6.*", "CPython<=3,==3.6.*"]`. - """ - # Each element (a Set[ParsedConstraint]) will get ANDed. We use sets to deduplicate - # identical top-level parsed constraint sets. - if not constraint_sets: - return [] - parsed_constraint_sets: Set[FrozenSet[Requirement]] = set() - for constraint_set in constraint_sets: - # Each element (a ParsedConstraint) will get ORed. - parsed_constraint_set = frozenset( - cls.parse_constraint(constraint) for constraint in constraint_set - ) - parsed_constraint_sets.add(parsed_constraint_set) - - def and_constraints(parsed_constraints: Sequence[Requirement]) -> Requirement: - merged_specs: Set[Tuple[str, str]] = set() - expected_interpreter = parsed_constraints[0].project_name - for parsed_constraint in parsed_constraints: - if parsed_constraint.project_name == expected_interpreter: - merged_specs.update(parsed_constraint.specs) - continue - - def key_fn(req: Requirement): - return req.project_name - - # NB: We must pre-sort the data for itertools.groupby() to work properly. - sorted_constraints = sorted(parsed_constraints, key=key_fn) - attempted_interpreters = { - interp: sorted( - str(parsed_constraint) for parsed_constraint in parsed_constraints - ) - for interp, parsed_constraints in itertools.groupby( - sorted_constraints, key=key_fn - ) - } - raise ValueError( - "Tried ANDing Python interpreter constraints with different interpreter " - "types. Please use only one interpreter type. Got " - f"{attempted_interpreters}." 
- ) - - formatted_specs = ",".join(f"{op}{version}" for op, version in merged_specs) - return Requirement.parse(f"{expected_interpreter}{formatted_specs}") - - def cmp_constraints(req1: Requirement, req2: Requirement) -> int: - if req1.project_name != req2.project_name: - return -1 if req1.project_name < req2.project_name else 1 - if req1.specs == req2.specs: - return 0 - return -1 if req1.specs < req2.specs else 1 - - return sorted( - { - and_constraints(constraints_product) - for constraints_product in itertools.product(*parsed_constraint_sets) - }, - key=functools.cmp_to_key(cmp_constraints), - ) - - @classmethod - def create_from_targets( - cls, targets: Iterable[Target], python_setup: PythonSetup - ) -> PexInterpreterConstraints: - return cls.create_from_compatibility_fields( - ( - tgt[InterpreterConstraintsField] - for tgt in targets - if tgt.has_field(InterpreterConstraintsField) - ), - python_setup, - ) - - @classmethod - def create_from_compatibility_fields( - cls, fields: Iterable[InterpreterConstraintsField], python_setup: PythonSetup - ) -> PexInterpreterConstraints: - constraint_sets = {field.value_or_global_default(python_setup) for field in fields} - # This will OR within each field and AND across fields. - merged_constraints = cls.merge_constraint_sets(constraint_sets) - return PexInterpreterConstraints(merged_constraints) - - @classmethod - def group_field_sets_by_constraints( - cls, field_sets: Iterable[_FS], python_setup: PythonSetup - ) -> FrozenDict["PexInterpreterConstraints", Tuple[_FS, ...]]: - results = defaultdict(set) - for fs in field_sets: - constraints = cls.create_from_compatibility_fields( - [fs.interpreter_constraints], python_setup - ) - results[constraints].add(fs) - return FrozenDict( - { - constraints: tuple(sorted(field_sets, key=lambda fs: fs.address)) - for constraints, field_sets in sorted(results.items()) - } - ) - - def generate_pex_arg_list(self) -> List[str]: - args = [] - for constraint in self: - args.extend(["--interpreter-constraint", str(constraint)]) - return args - - def _includes_version(self, major_minor: str, last_patch: int) -> bool: - patch_versions = list(reversed(range(0, last_patch + 1))) - for req in self: - if any( - req.specifier.contains(f"{major_minor}.{p}") for p in patch_versions # type: ignore[attr-defined] - ): - return True - return False - - def includes_python2(self) -> bool: - """Checks if any of the constraints include Python 2. - - This will return True even if the code works with Python 3 too, so long as at least one of - the constraints works with Python 2. - """ - last_py27_patch_version = 18 - return self._includes_version("2.7", last_patch=last_py27_patch_version) - - def minimum_python_version(self) -> str | None: - """Find the lowest major.minor Python version that will work with these constraints. - - The constraints may also be compatible with later versions; this is the lowest version that - still works. - """ - if self.includes_python2(): - return "2.7" - max_expected_py3_patch_version = 15 # The current max is 3.6.12. - for major_minor in ("3.5", "3.6", "3.7", "3.8", "3.9", "3.10"): - if self._includes_version(major_minor, last_patch=max_expected_py3_patch_version): - return major_minor - return None - - def _requires_python3_version_or_newer( - self, *, allowed_versions: Iterable[str], prior_version: str - ) -> bool: - # Assume any 3.x release has no more than 15 releases. The max is currently 3.6.12. - patch_versions = list(reversed(range(0, 15))) - # We only need to look at the prior Python release. 
For example, consider Python 3.8+ - # looking at 3.7. If using something like `>=3.5`, Py37 will be included. - # `==3.6.*,!=3.7.*,==3.8.*` is extremely unlikely, and even that will work correctly as - # it's an invalid constraint so setuptools returns False always. `['==2.7.*', '==3.8.*']` - # will fail because not every single constraint is exclusively 3.8. - prior_versions = [f"{prior_version}.{p}" for p in patch_versions] - allowed_versions = [ - f"{major_minor}.{p}" for major_minor in allowed_versions for p in patch_versions - ] - for req in self: - if any( - req.specifier.contains(prior) for prior in prior_versions # type: ignore[attr-defined] - ): - return False - if not any( - req.specifier.contains(allowed) for allowed in allowed_versions # type: ignore[attr-defined] - ): - return False - return True - - def requires_python38_or_newer(self) -> bool: - """Checks if the constraints are all for Python 3.8+. - - This will return False if Python 3.8 is allowed, but prior versions like 3.7 are also - allowed. - """ - return self._requires_python3_version_or_newer( - allowed_versions=["3.8", "3.9", "3.10"], prior_version="3.7" - ) - - def __str__(self) -> str: - return " OR ".join(str(constraint) for constraint in self) - - def debug_hint(self) -> str: - return str(self) - - class PexPlatforms(DeduplicatedCollection[str]): sort_input = True @@ -320,7 +96,7 @@ class PexRequest(EngineAwareParameter): output_filename: str internal_only: bool requirements: PexRequirements - interpreter_constraints: PexInterpreterConstraints + interpreter_constraints: InterpreterConstraints platforms: PexPlatforms sources: Digest | None additional_inputs: Digest | None @@ -337,7 +113,7 @@ def __init__( output_filename: str, internal_only: bool, requirements: PexRequirements = PexRequirements(), - interpreter_constraints=PexInterpreterConstraints(), + interpreter_constraints=InterpreterConstraints(), platforms=PexPlatforms(), sources: Digest | None = None, additional_inputs: Digest | None = None, @@ -442,7 +218,7 @@ class TwoStepPex: @rule(desc="Find Python interpreter for constraints", level=LogLevel.DEBUG) async def find_interpreter( - interpreter_constraints: PexInterpreterConstraints, pex_runtime_env: PexRuntimeEnvironment + interpreter_constraints: InterpreterConstraints, pex_runtime_env: PexRuntimeEnvironment ) -> PythonExecutable: formatted_constraints = " OR ".join(str(constraint) for constraint in interpreter_constraints) result = await Get( @@ -602,7 +378,7 @@ async def build_pex( # will have already validated that there were no platforms. 
if request.internal_only: python = await Get( - PythonExecutable, PexInterpreterConstraints, request.interpreter_constraints + PythonExecutable, InterpreterConstraints, request.interpreter_constraints ) argv.append("--no-emit-warnings") diff --git a/src/python/pants/backend/python/util_rules/pex_from_targets.py b/src/python/pants/backend/python/util_rules/pex_from_targets.py index 14919df6665..b1f57e967e4 100644 --- a/src/python/pants/backend/python/util_rules/pex_from_targets.py +++ b/src/python/pants/backend/python/util_rules/pex_from_targets.py @@ -17,10 +17,10 @@ PythonRequirementsField, parse_requirements_file, ) +from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints from pants.backend.python.util_rules.pex import ( MaybeConstraintsFile, Pex, - PexInterpreterConstraints, PexPlatforms, PexRequest, PexRequirements, @@ -63,7 +63,7 @@ class PexFromTargetsRequest: include_source_files: bool additional_sources: Digest | None additional_inputs: Digest | None - hardcoded_interpreter_constraints: PexInterpreterConstraints | None + hardcoded_interpreter_constraints: InterpreterConstraints | None direct_deps_only: bool # This field doesn't participate in comparison (and therefore hashing), as it doesn't affect # the result. @@ -82,7 +82,7 @@ def __init__( include_source_files: bool = True, additional_sources: Digest | None = None, additional_inputs: Digest | None = None, - hardcoded_interpreter_constraints: PexInterpreterConstraints | None = None, + hardcoded_interpreter_constraints: InterpreterConstraints | None = None, direct_deps_only: bool = False, description: str | None = None, ) -> None: @@ -138,7 +138,7 @@ def for_requirements( addresses: Iterable[Address], *, internal_only: bool, - hardcoded_interpreter_constraints: PexInterpreterConstraints | None = None, + hardcoded_interpreter_constraints: InterpreterConstraints | None = None, zip_safe: bool = False, direct_deps_only: bool = False, ) -> PexFromTargetsRequest: @@ -210,12 +210,12 @@ async def pex_from_targets( if request.hardcoded_interpreter_constraints: interpreter_constraints = request.hardcoded_interpreter_constraints else: - calculated_constraints = PexInterpreterConstraints.create_from_targets( + calculated_constraints = InterpreterConstraints.create_from_targets( all_targets, python_setup ) # If there are no targets, we fall back to the global constraints. This is relevant, # for example, when running `./pants repl` with no specs. 
-        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
+        interpreter_constraints = calculated_constraints or InterpreterConstraints(
             python_setup.interpreter_constraints
         )
diff --git a/src/python/pants/backend/python/util_rules/pex_test.py b/src/python/pants/backend/python/util_rules/pex_test.py
index 1da7b7d7e2b..cdcb2a45f8a 100644
--- a/src/python/pants/backend/python/util_rules/pex_test.py
+++ b/src/python/pants/backend/python/util_rules/pex_test.py
@@ -8,7 +8,7 @@
 import textwrap
 import zipfile
 from dataclasses import dataclass
-from typing import Dict, Iterable, Iterator, List, Mapping, Tuple, cast
+from typing import Dict, Iterable, Iterator, Mapping, Tuple, cast
 import pytest
 from packaging.specifiers import SpecifierSet
@@ -17,15 +17,14 @@
 from pants.backend.python.target_types import (
     EntryPoint,
-    InterpreterConstraintsField,
     MainSpecification,
     PythonRequirementConstraints,
 )
+from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
 from pants.backend.python.util_rules.pex import (
     MaybeConstraintsFile,
     Pex,
     PexDistributionInfo,
-    PexInterpreterConstraints,
     PexPlatforms,
     PexProcess,
     PexRequest,
@@ -37,262 +36,11 @@
 )
 from pants.backend.python.util_rules.pex import rules as pex_rules
 from pants.backend.python.util_rules.pex_cli import PexPEX
-from pants.engine.addresses import Address
 from pants.engine.fs import EMPTY_DIGEST, CreateDigest, Digest, FileContent
 from pants.engine.process import Process, ProcessResult
 from pants.engine.rules import SubsystemRule
-from pants.engine.target import FieldSet
 from pants.python.python_setup import PythonSetup
-from pants.testutil.option_util import create_subsystem
 from pants.testutil.rule_runner import QueryRule, RuleRunner
-from pants.util.frozendict import FrozenDict
-
-
-def test_merge_interpreter_constraints() -> None:
-    def assert_merged(*, inp: List[List[str]], expected: List[str]) -> None:
-        result = sorted(str(req) for req in PexInterpreterConstraints.merge_constraint_sets(inp))
-        # Requirement.parse() sorts specs differently than we'd like, so we convert each str to a
-        # Requirement.
-        normalized_expected = sorted(str(Requirement.parse(v)) for v in expected)
-        assert result == normalized_expected
-
-    # Multiple constraint sets get merged so that they are ANDed.
-    # A & B => A & B
-    assert_merged(inp=[["CPython==2.7.*"], ["CPython==3.6.*"]], expected=["CPython==2.7.*,==3.6.*"])
-
-    # Multiple constraints within a single constraint set are kept separate so that they are ORed.
-    # A | B => A | B
-    assert_merged(
-        inp=[["CPython==2.7.*", "CPython==3.6.*"]], expected=["CPython==2.7.*", "CPython==3.6.*"]
-    )
-
-    # Input constraints already were ANDed.
-    # A => A
-    assert_merged(inp=[["CPython>=2.7,<3"]], expected=["CPython>=2.7,<3"])
-
-    # Both AND and OR.
-    # (A | B) & C => (A & C) | (B & C)
-    assert_merged(
-        inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*"]],
-        expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"],
-    )
-    # A & B & (C | D) => (A & B & C) | (A & B & D)
-    assert_merged(
-        inp=[["CPython==2.7.*"], ["CPython==3.6.*"], ["CPython==3.7.*", "CPython==3.8.*"]],
-        expected=["CPython==2.7.*,==3.6.*,==3.7.*", "CPython==2.7.*,==3.6.*,==3.8.*"],
-    )
-    # (A | B) & (C | D) => (A & C) | (A & D) | (B & C) | (B & D)
-    assert_merged(
-        inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*", "CPython==3.7.*"]],
-        expected=[
-            "CPython>=2.7,<3,==3.6.*",
-            "CPython>=2.7,<3,==3.7.*",
-            "CPython>=3.5,==3.6.*",
-            "CPython>=3.5,==3.7.*",
-        ],
-    )
-    # A & (B | C | D) & (E | F) & G =>
-    # (A & B & E & G) | (A & B & F & G) | (A & C & E & G) | (A & C & F & G) | (A & D & E & G) | (A & D & F & G)
-    assert_merged(
-        inp=[
-            ["CPython==3.6.5"],
-            ["CPython==2.7.14", "CPython==2.7.15", "CPython==2.7.16"],
-            ["CPython>=3.6", "CPython==3.5.10"],
-            ["CPython>3.8"],
-        ],
-        expected=[
-            "CPython==2.7.14,==3.5.10,==3.6.5,>3.8",
-            "CPython==2.7.14,>=3.6,==3.6.5,>3.8",
-            "CPython==2.7.15,==3.5.10,==3.6.5,>3.8",
-            "CPython==2.7.15,>=3.6,==3.6.5,>3.8",
-            "CPython==2.7.16,==3.5.10,==3.6.5,>3.8",
-            "CPython==2.7.16,>=3.6,==3.6.5,>3.8",
-        ],
-    )
-
-    # Deduplicate between constraint_sets
-    # (A | B) & (A | B) => A | B. Naively, this should actually resolve as follows:
-    # (A | B) & (A | B) => (A & A) | (A & B) | (B & B) => A | (A & B) | B.
-    # But, we first deduplicate each constraint_set. (A | B) & (A | B) can be rewritten as
-    # X & X => X.
-    assert_merged(
-        inp=[["CPython==2.7.*", "CPython==3.6.*"], ["CPython==2.7.*", "CPython==3.6.*"]],
-        expected=["CPython==2.7.*", "CPython==3.6.*"],
-    )
-    # (A | B) & C & (A | B) => (A & C) | (B & C). Alternatively, this can be rewritten as
-    # X & Y & X => X & Y.
-    assert_merged(
-        inp=[
-            ["CPython>=2.7,<3", "CPython>=3.5"],
-            ["CPython==3.6.*"],
-            ["CPython>=3.5", "CPython>=2.7,<3"],
-        ],
-        expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"],
-    )
-
-    # No specifiers
-    assert_merged(inp=[["CPython"]], expected=["CPython"])
-    assert_merged(inp=[["CPython"], ["CPython==3.7.*"]], expected=["CPython==3.7.*"])
-
-    # No interpreter is shorthand for CPython, which is how Pex behaves
-    assert_merged(inp=[[">=3.5"], ["CPython==3.7.*"]], expected=["CPython>=3.5,==3.7.*"])
-
-    # Different Python interpreters, which are guaranteed to fail when ANDed but are safe when ORed.
-    with pytest.raises(ValueError):
-        PexInterpreterConstraints.merge_constraint_sets([["CPython==3.7.*"], ["PyPy==43.0"]])
-    assert_merged(inp=[["CPython==3.7.*", "PyPy==43.0"]], expected=["CPython==3.7.*", "PyPy==43.0"])
-
-    # Ensure we can handle empty input.
-    assert_merged(inp=[], expected=[])
-
-
-@pytest.mark.parametrize(
-    "constraints",
-    [
-        ["CPython>=2.7,<3"],
-        ["CPython>=2.7,<3", "CPython>=3.6"],
-        ["CPython>=2.7.13"],
-        ["CPython>=2.7.13,<2.7.16"],
-        ["CPython>=2.7.13,!=2.7.16"],
-        ["PyPy>=2.7,<3"],
-    ],
-)
-def test_interpreter_constraints_includes_python2(constraints) -> None:
-    assert PexInterpreterConstraints(constraints).includes_python2() is True
-
-
-@pytest.mark.parametrize(
-    "constraints",
-    [
-        ["CPython>=3.6"],
-        ["CPython>=3.7"],
-        ["CPython>=3.6", "CPython>=3.8"],
-        ["CPython!=2.7.*"],
-        ["PyPy>=3.6"],
-    ],
-)
-def test_interpreter_constraints_do_not_include_python2(constraints):
-    assert PexInterpreterConstraints(constraints).includes_python2() is False
-
-
-@pytest.mark.parametrize(
-    "constraints,expected",
-    [
-        (["CPython>=2.7"], "2.7"),
-        (["CPython>=3.5"], "3.5"),
-        (["CPython>=3.6"], "3.6"),
-        (["CPython>=3.7"], "3.7"),
-        (["CPython>=3.8"], "3.8"),
-        (["CPython>=3.9"], "3.9"),
-        (["CPython>=3.10"], "3.10"),
-        (["CPython==2.7.10"], "2.7"),
-        (["CPython==3.5.*", "CPython>=3.6"], "3.5"),
-        (["CPython==2.6.*"], None),
-    ],
-)
-def test_interpreter_constraints_minimum_python_version(
-    constraints: List[str], expected: str
-) -> None:
-    assert PexInterpreterConstraints(constraints).minimum_python_version() == expected
-
-
-@pytest.mark.parametrize(
-    "constraints",
-    [
-        ["CPython==3.8.*"],
-        ["CPython==3.8.1"],
-        ["CPython==3.9.1"],
-        ["CPython>=3.8"],
-        ["CPython>=3.9"],
-        ["CPython>=3.10"],
-        ["CPython==3.8.*", "CPython==3.9.*"],
-        ["PyPy>=3.8"],
-    ],
-)
-def test_interpreter_constraints_require_python38(constraints) -> None:
-    assert PexInterpreterConstraints(constraints).requires_python38_or_newer() is True
-
-
-@pytest.mark.parametrize(
-    "constraints",
-    [
-        ["CPython==3.5.*"],
-        ["CPython==3.6.*"],
-        ["CPython==3.7.*"],
-        ["CPython==3.7.3"],
-        ["CPython>=3.7"],
-        ["CPython==3.7.*", "CPython==3.8.*"],
-        ["CPython==3.5.3", "CPython==3.8.3"],
-        ["PyPy>=3.7"],
-    ],
-)
-def test_interpreter_constraints_do_not_require_python38(constraints):
-    assert PexInterpreterConstraints(constraints).requires_python38_or_newer() is False
-
-
-@dataclass(frozen=True)
-class MockFieldSet(FieldSet):
-    interpreter_constraints: InterpreterConstraintsField
-
-    @classmethod
-    def create_for_test(cls, address: Address, compat: str | None) -> MockFieldSet:
-        return cls(
-            address=address,
-            interpreter_constraints=InterpreterConstraintsField(
-                [compat] if compat else None, address=address
-            ),
-        )
-
-
-def test_group_field_sets_by_constraints() -> None:
-    py2_fs = MockFieldSet.create_for_test(Address("", target_name="py2"), ">=2.7,<3")
-    py3_fs = [
-        MockFieldSet.create_for_test(Address("", target_name="py3"), "==3.6.*"),
-        MockFieldSet.create_for_test(Address("", target_name="py3_second"), "==3.6.*"),
-    ]
-    no_constraints_fs = MockFieldSet.create_for_test(
-        Address("", target_name="no_constraints"), None
-    )
-    assert PexInterpreterConstraints.group_field_sets_by_constraints(
-        [py2_fs, *py3_fs, no_constraints_fs],
-        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
-    ) == FrozenDict(
-        {
-            PexInterpreterConstraints(): (no_constraints_fs,),
-            PexInterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,),
-            PexInterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs),
-        }
-    )
-
-
-def test_group_field_sets_by_constraints_with_unsorted_inputs() -> None:
-    py3_fs = [
-        MockFieldSet.create_for_test(
-            Address("src/python/a_dir/path.py", target_name="test"), "==3.6.*"
-        ),
-        MockFieldSet.create_for_test(
-            Address("src/python/b_dir/path.py", target_name="test"), ">2.7,<3"
-        ),
-        MockFieldSet.create_for_test(
-            Address("src/python/c_dir/path.py", target_name="test"), "==3.6.*"
-        ),
-    ]
-
-    ic_36 = PexInterpreterConstraints([Requirement.parse("CPython==3.6.*")])
-
-    output = PexInterpreterConstraints.group_field_sets_by_constraints(
-        py3_fs,
-        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
-    )
-
-    assert output[ic_36] == (
-        MockFieldSet.create_for_test(
-            Address("src/python/a_dir/path.py", target_name="test"), "==3.6.*"
-        ),
-        MockFieldSet.create_for_test(
-            Address("src/python/c_dir/path.py", target_name="test"), "==3.6.*"
-        ),
-    )
 def test_maybe_constraints_file() -> None:
@@ -377,7 +125,7 @@ def create_pex_and_get_all_data(
     pex_type: type[Pex | VenvPex] = Pex,
     requirements: PexRequirements = PexRequirements(),
     main: MainSpecification | None = None,
-    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
+    interpreter_constraints: InterpreterConstraints = InterpreterConstraints(),
     platforms: PexPlatforms = PexPlatforms(),
     sources: Digest | None = None,
     additional_inputs: Digest | None = None,
@@ -456,7 +204,7 @@ def create_pex_and_get_pex_info(
     pex_type: type[Pex | VenvPex] = Pex,
     requirements: PexRequirements = PexRequirements(),
     main: MainSpecification | None = None,
-    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
+    interpreter_constraints: InterpreterConstraints = InterpreterConstraints(),
     platforms: PexPlatforms = PexPlatforms(),
     sources: Digest | None = None,
     additional_pants_args: Tuple[str, ...] = (),
@@ -541,7 +289,7 @@ def test_pex_environment(rule_runner: RuleRunner, pex_type: type[Pex | VenvPex])
             "--subprocess-environment-env-vars=LANG",  # Value should come from environment.
             "--subprocess-environment-env-vars=ftp_proxy=dummyproxy",
         ),
-        interpreter_constraints=PexInterpreterConstraints(["CPython>=3.6"]),
+        interpreter_constraints=InterpreterConstraints(["CPython>=3.6"]),
         env={"LANG": "es_PY.UTF-8"},
     )
@@ -614,7 +362,7 @@ def test_entry_point(rule_runner: RuleRunner) -> None:
 def test_interpreter_constraints(rule_runner: RuleRunner) -> None:
-    constraints = PexInterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"])
+    constraints = InterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"])
     pex_info = create_pex_and_get_pex_info(
         rule_runner, interpreter_constraints=constraints, internal_only=False
     )
@@ -630,7 +378,7 @@ def test_platforms(rule_runner: RuleRunner) -> None:
     # We use Python 2.7, rather than Python 3, to ensure that the specified platform is
     # actually used.
     platforms = PexPlatforms(["linux-x86_64-cp-27-cp27mu"])
-    constraints = PexInterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"])
+    constraints = InterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"])
     pex_output = create_pex_and_get_all_data(
         rule_runner,
         requirements=PexRequirements(["cryptography==2.9"]),
diff --git a/src/python/pants/init/plugin_resolver.py b/src/python/pants/init/plugin_resolver.py
index 782f94f02a8..b71169640ae 100644
--- a/src/python/pants/init/plugin_resolver.py
+++ b/src/python/pants/init/plugin_resolver.py
@@ -9,13 +9,8 @@
 from pkg_resources import WorkingSet
 from pkg_resources import working_set as global_working_set
-from pants.backend.python.util_rules.pex import (
-    PexInterpreterConstraints,
-    PexRequest,
-    PexRequirements,
-    VenvPex,
-    VenvPexProcess,
-)
+from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
+from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess
 from pants.engine.collection import DeduplicatedCollection
 from pants.engine.environment import CompleteEnvironment
 from pants.engine.internals.session import SessionValues
@@ -39,7 +34,7 @@ class ResolvedPluginDistributions(DeduplicatedCollection[str]):
 @rule
 async def resolve_plugins(
-    interpreter_constraints: PexInterpreterConstraints, global_options: GlobalOptions
+    interpreter_constraints: InterpreterConstraints, global_options: GlobalOptions
 ) -> ResolvedPluginDistributions:
     """This rule resolves plugins using a VenvPex, and exposes the absolute paths of their dists.
@@ -90,13 +85,13 @@ class PluginResolver:
     def __init__(
         self,
         scheduler: BootstrapScheduler,
-        interpreter_constraints: Optional[PexInterpreterConstraints] = None,
+        interpreter_constraints: Optional[InterpreterConstraints] = None,
     ) -> None:
         self._scheduler = scheduler
         self._interpreter_constraints = (
             interpreter_constraints
             if interpreter_constraints is not None
-            else PexInterpreterConstraints([f"=={'.'.join(map(str, sys.version_info[:3]))}"])
+            else InterpreterConstraints([f"=={'.'.join(map(str, sys.version_info[:3]))}"])
         )
     def resolve(
@@ -142,6 +137,6 @@ def _resolve_plugins(
 def rules():
     return [
-        QueryRule(ResolvedPluginDistributions, [PexInterpreterConstraints]),
+        QueryRule(ResolvedPluginDistributions, [InterpreterConstraints]),
         *collect_rules(),
     ]
diff --git a/tests/python/pants_test/init/test_plugin_resolver.py b/tests/python/pants_test/init/test_plugin_resolver.py
index 09628223993..e1f6eaabca2 100644
--- a/tests/python/pants_test/init/test_plugin_resolver.py
+++ b/tests/python/pants_test/init/test_plugin_resolver.py
@@ -13,13 +13,8 @@
 from pkg_resources import Requirement, WorkingSet
 from pants.backend.python.util_rules import pex
-from pants.backend.python.util_rules.pex import (
-    Pex,
-    PexInterpreterConstraints,
-    PexProcess,
-    PexRequest,
-    PexRequirements,
-)
+from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
+from pants.backend.python.util_rules.pex import Pex, PexProcess, PexRequest, PexRequirements
 from pants.core.util_rules import archive, external_tool
 from pants.engine.environment import CompleteEnvironment
 from pants.engine.fs import CreateDigest, Digest, FileContent, MergeDigests, Snapshot
@@ -62,7 +57,7 @@ def rule_runner() -> RuleRunner:
 def _create_pex(
     rule_runner: RuleRunner,
-    interpreter_constraints: PexInterpreterConstraints,
+    interpreter_constraints: InterpreterConstraints,
 ) -> Pex:
     request = PexRequest(
         output_filename="setup-py-runner.pex",
@@ -76,7 +71,7 @@ def _create_pex(
 def _run_setup_py(
     rule_runner: RuleRunner,
     plugin: str,
-    interpreter_constraints: PexInterpreterConstraints,
+    interpreter_constraints: InterpreterConstraints,
     version: Optional[str],
     setup_py_args: Iterable[str],
     install_dir: str,
@@ -128,9 +123,9 @@ def provide_chroot(existing):
             yield new_chroot, True
     interpreter_constraints = (
-        PexInterpreterConstraints([f"=={interpreter.identity.version_str}"])
+        InterpreterConstraints([f"=={interpreter.identity.version_str}"])
         if interpreter
-        else PexInterpreterConstraints([">=3.7"])
+        else InterpreterConstraints([">=3.7"])
     )
     with provide_chroot(chroot) as (root_dir, create_artifacts):