Commit
Merge pull request #160 from notatallshaw/switch-from-flake8-to-ruff
Switch from flake8 to ruff - and run various linting cleanups
frostming authored Aug 1, 2024
2 parents e4de27e + f2891e1 commit 2410b7e
Showing 13 changed files with 77 additions and 91 deletions.
4 changes: 2 additions & 2 deletions examples/extras_provider.py
@@ -39,15 +39,15 @@ def get_base_requirement(self, candidate):
        raise NotImplementedError

    def identify(self, requirement_or_candidate):
-        base = super(ExtrasProvider, self).identify(requirement_or_candidate)
+        base = super().identify(requirement_or_candidate)
        extras = self.get_extras_for(requirement_or_candidate)
        if extras:
            return (base, extras)
        else:
            return base

    def get_dependencies(self, candidate):
-        deps = super(ExtrasProvider, self).get_dependencies(candidate)
+        deps = super().get_dependencies(candidate)
        if candidate.extras:
            req = self.get_base_requirement(candidate)
            deps.append(req)
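Note: the two edits above rely on Python 3's zero-argument super(), which resolves the enclosing class and instance automatically; behaviour is unchanged. A minimal standalone sketch of the equivalence (the class names below are illustrative, not taken from the example file):

class BaseProvider:
    def identify(self, requirement):
        return requirement.lower()


class ExtrasLikeProvider(BaseProvider):
    def identify(self, requirement):
        # Equivalent to the removed spelling:
        #   super(ExtrasLikeProvider, self).identify(requirement)
        return super().identify(requirement) + "[extra]"


print(ExtrasLikeProvider().identify("Requests"))  # prints: requests[extra]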
4 changes: 2 additions & 2 deletions examples/pypi_wheel_provider.py
@@ -68,7 +68,7 @@ def dependencies(self):

def get_project_from_pypi(project, extras):
    """Return candidates created from the project name and extras."""
-    url = "https://pypi.org/simple/{}".format(project)
+    url = f"https://pypi.org/simple/{project}"
    data = requests.get(url).content
    doc = html5lib.parse(data, namespaceHTMLElements=False)
    for i in doc.findall(".//a"):
@@ -120,7 +120,7 @@ def get_extras_for(self, requirement_or_candidate):
        return tuple(sorted(requirement_or_candidate.extras))

    def get_base_requirement(self, candidate):
-        return Requirement("{}=={}".format(candidate.name, candidate.version))
+        return Requirement(f"{candidate.name}=={candidate.version}")

    def get_preference(self, identifier, resolutions, candidates, information):
        return sum(1 for _ in candidates[identifier])
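Note: the two changes in this file only swap str.format for f-strings; the resulting strings are identical. A quick sketch (the project name and version below are made up for illustration):

project, version = "resolvelib", "1.0.1"
assert "https://pypi.org/simple/{}".format(project) == f"https://pypi.org/simple/{project}"
assert "{}=={}".format(project, version) == f"{project}=={version}"
print("both spellings agree")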
12 changes: 4 additions & 8 deletions examples/reporter_demo.py
@@ -26,18 +26,14 @@
"""


-class Requirement(namedtuple("Requirement", "name specifier")):  # noqa
+class Requirement(namedtuple("Requirement", "name specifier")):
    def __repr__(self):
-        return "<Requirement({name}{specifier})>".format(
-            name=self.name, specifier=self.specifier
-        )
+        return f"<Requirement({self.name}{self.specifier})>"


-class Candidate(namedtuple("Candidate", "name version")):  # noqa
+class Candidate(namedtuple("Candidate", "name version")):
    def __repr__(self):
-        return "<{name}=={version}>".format(
-            name=self.name, version=self.version
-        )
+        return f"<{self.name}=={self.version}>"


def splitstrip(s, parts):
2 changes: 1 addition & 1 deletion noxfile.py
@@ -18,7 +18,7 @@ def lint(session):

    session.run("black", "--check", ".")
    session.run("isort", ".")
-    session.run("flake8", ".")
+    session.run("ruff", "check", ".")
    session.run("mypy", "src", "tests")


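Note: the lint session now invokes ruff through nox's Python API. A minimal noxfile sketch with the same call (the install step and the trimmed-down session body are assumptions for illustration, not the project's full session):

import nox


@nox.session
def lint(session):
    session.install("ruff")
    # Runs `ruff check .` in the project root, replacing the old
    # session.run("flake8", ".") call.
    session.run("ruff", "check", ".")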
13 changes: 7 additions & 6 deletions pyproject.toml
@@ -26,8 +26,7 @@ Homepage = "https://github.com/sarugaku/resolvelib"
[project.optional-dependencies]
lint = [
    "black==23.12.1",
-    "flake8",
-    "Flake8-pyproject",
+    "ruff",
    "isort",
    "mypy",
    "types-requests",
@@ -92,10 +91,12 @@ directory = 'trivial'
name = 'Trivial Changes'
showcontent = false

-[tool.flake8]
-max-line-length = 88
-select = ["C","E","F","W","B"]
-ignore = ["E203", "W503", "F401"]
+[tool.ruff]
+line-length = 88
+
+[tool.ruff.lint]
+select = ["C","E","F","W","B","RUF","PLE","PLW"]
+ignore = ["PLW2901"]
exclude = [
    ".git",
    ".venv",
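Note: the old [tool.flake8] table is replaced by [tool.ruff] plus a [tool.ruff.lint] table that keeps the pycodestyle/pyflakes/bugbear selections and adds ruff-specific (RUF) and pylint error/warning (PLE/PLW) rules, while ignoring PLW2901 (loop variable overwritten inside the loop body). A small illustration of the pattern PLW2901 would otherwise flag (the snippet is illustrative, not from this codebase):

lines = ["  foo  ", "  bar  "]
for line in lines:
    # Reassigning the loop variable is what PLW2901 reports; with the rule
    # ignored, this common idiom stays allowed.
    line = line.strip()
    print(line)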
Empty file removed src/resolvelib/compat/__init__.py
6 changes: 0 additions & 6 deletions src/resolvelib/compat/collections_abc.py

This file was deleted.

1 change: 0 additions & 1 deletion src/resolvelib/compat/collections_abc.pyi

This file was deleted.

76 changes: 39 additions & 37 deletions src/resolvelib/resolvers.py
@@ -109,8 +109,8 @@ def __init__(
    def state(self) -> State[RT, CT, KT]:
        try:
            return self._states[-1]
-        except IndexError:
-            raise AttributeError("state")
+        except IndexError as e:
+            raise AttributeError("state") from e

    def _push_new_state(self) -> None:
        """Push a new state into history.
@@ -269,6 +269,41 @@ def _attempt_to_pin_criterion(self, name: KT) -> list[Criterion[RT, CT]]:
        # end, signal for backtracking.
        return causes

+    def _patch_criteria(
+        self, incompatibilities_from_broken: list[tuple[KT, list[CT]]]
+    ) -> bool:
+        # Create a new state from the last known-to-work one, and apply
+        # the previously gathered incompatibility information.
+        for k, incompatibilities in incompatibilities_from_broken:
+            if not incompatibilities:
+                continue
+            try:
+                criterion = self.state.criteria[k]
+            except KeyError:
+                continue
+            matches = self._p.find_matches(
+                identifier=k,
+                requirements=IteratorMapping(
+                    self.state.criteria,
+                    operator.methodcaller("iter_requirement"),
+                ),
+                incompatibilities=IteratorMapping(
+                    self.state.criteria,
+                    operator.attrgetter("incompatibilities"),
+                    {k: incompatibilities},
+                ),
+            )
+            candidates: IterableView[CT] = build_iter_view(matches)
+            if not candidates:
+                return False
+            incompatibilities.extend(criterion.incompatibilities)
+            self.state.criteria[k] = Criterion(
+                candidates=candidates,
+                information=list(criterion.information),
+                incompatibilities=incompatibilities,
+            )
+        return True
+
    def _backjump(self, causes: list[RequirementInformation[RT, CT]]) -> bool:
        """Perform backjumping.
@@ -347,41 +382,8 @@ def _backjump(self, causes: list[RequirementInformation[RT, CT]]) -> bool:
            # Also mark the newly known incompatibility.
            incompatibilities_from_broken.append((name, [candidate]))

-            # Create a new state from the last known-to-work one, and apply
-            # the previously gathered incompatibility information.
-            def _patch_criteria() -> bool:
-                for k, incompatibilities in incompatibilities_from_broken:
-                    if not incompatibilities:
-                        continue
-                    try:
-                        criterion = self.state.criteria[k]
-                    except KeyError:
-                        continue
-                    matches = self._p.find_matches(
-                        identifier=k,
-                        requirements=IteratorMapping(
-                            self.state.criteria,
-                            operator.methodcaller("iter_requirement"),
-                        ),
-                        incompatibilities=IteratorMapping(
-                            self.state.criteria,
-                            operator.attrgetter("incompatibilities"),
-                            {k: incompatibilities},
-                        ),
-                    )
-                    candidates: IterableView[CT] = build_iter_view(matches)
-                    if not candidates:
-                        return False
-                    incompatibilities.extend(criterion.incompatibilities)
-                    self.state.criteria[k] = Criterion(
-                        candidates=candidates,
-                        information=list(criterion.information),
-                        incompatibilities=incompatibilities,
-                    )
-                return True
-
            self._push_new_state()
-            success = _patch_criteria()
+            success = self._patch_criteria(incompatibilities_from_broken)

            # It works! Let's work on this new state.
            if success:
@@ -421,7 +423,7 @@ def resolve(
            try:
                self._add_to_criteria(self.state.criteria, r, parent=None)
            except RequirementsConflicted as e:
-                raise ResolutionImpossible(e.criterion.information)
+                raise ResolutionImpossible(e.criterion.information) from e

        # The root state is saved as a sentinel so the first ever pin can have
        # something to backtrack to if it fails. The root state is basically
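Note: besides lifting the nested _patch_criteria closure into a method that receives incompatibilities_from_broken explicitly, this file now chains exceptions with raise ... from, so the triggering error stays attached as __cause__. A standalone sketch of what the chaining preserves (the names below are illustrative):

states = []
try:
    try:
        states[-1]
    except IndexError as e:
        raise AttributeError("state") from e
except AttributeError as exc:
    # The original IndexError remains available for debugging.
    print(type(exc.__cause__).__name__)  # prints: IndexError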
2 changes: 0 additions & 2 deletions tests/conftest.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
import pytest

from resolvelib import BaseReporter
3 changes: 1 addition & 2 deletions tests/functional/cocoapods/test_resolvers_cocoapods.py
@@ -146,8 +146,7 @@ def _clean_identifier(s):
def _iter_resolved(dependencies):
    for entry in dependencies:
        yield (entry["name"], Version(entry["version"]))
-        for sub in _iter_resolved(entry["dependencies"]):
-            yield sub
+        yield from _iter_resolved(entry["dependencies"])


class CocoaPodsInputProvider(AbstractProvider):
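Note: yield from delegates to the nested generator and replaces the manual re-yield loop without changing the produced values. A small equivalence sketch (the data shape below is made up for illustration):

def iter_resolved(dependencies):
    for entry in dependencies:
        yield (entry["name"], entry["version"])
        # Same result as: for sub in iter_resolved(entry["dependencies"]): yield sub
        yield from iter_resolved(entry["dependencies"])


deps = [
    {
        "name": "a",
        "version": "1.0",
        "dependencies": [{"name": "b", "version": "2.0", "dependencies": []}],
    }
]
print(list(iter_resolved(deps)))  # [('a', '1.0'), ('b', '2.0')]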
41 changes: 19 additions & 22 deletions tests/functional/python/py2index.py
@@ -31,13 +31,10 @@
import urllib.parse
from typing import (
    Dict,
-    FrozenSet,
    Iterable,
    Iterator,
    List,
    NamedTuple,
-    Optional,
-    Set,
    Tuple,
    Union,
)
@@ -64,15 +61,15 @@ def _parse_python_version(s: str) -> PythonVersion:
    return (int(major),)


-def _parse_output_path(s: str) -> Optional[pathlib.Path]:
+def _parse_output_path(s: str) -> pathlib.Path | None:
    if s == "-":
        return None
    if os.sep in s or (os.altsep and os.altsep in s):
        return pathlib.Path(s)
    return pathlib.Path(__file__).with_name("inputs").joinpath("index", s)


-def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
+def parse_args(args: list[str] | None) -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "package_names",
@@ -118,24 +115,24 @@ def get_output_path(path: pathlib.Path, overwrite: bool) -> pathlib.Path:
    return path


-def _parse_tag(s: str) -> FrozenSet[packaging.tags.Tag]:
+def _parse_tag(s: str) -> frozenset[packaging.tags.Tag]:
    try:
        return packaging.tags.parse_tag(s)
-    except ValueError:
-        raise ValueError(f"invalid tag {s!r}")
+    except ValueError as e:
+        raise ValueError(f"invalid tag {s!r}") from e


@dataclasses.dataclass()
class WheelMatcher:
    required_python: packaging.version.Version
-    tags: Dict[packaging.tags.Tag, int]
+    tags: dict[packaging.tags.Tag, int]

    @classmethod
    def compatible_with(
        cls,
        python_version: PythonVersion,
-        impl: Optional[str],
-        plats: Optional[List[str]],
+        impl: str | None,
+        plats: list[str] | None,
    ) -> WheelMatcher:
        required_python = packaging.version.Version(
            ".".join(str(v) for v in python_version)
@@ -148,7 +145,7 @@ def compatible_with(
        tags = {t: i for i, t in enumerate(tag_it)}
        return cls(required_python, tags)

-    def rank(self, tag: str, requires_python: Optional[str]) -> Optional[int]:
+    def rank(self, tag: str, requires_python: str | None) -> int | None:
        if requires_python:
            spec = packaging.specifiers.SpecifierSet(requires_python)
            if self.required_python not in spec:
@@ -197,27 +194,27 @@ def tell(self):
        return self._offset


-def _parse_wheel_name(rest: str) -> Tuple[str, str, str]:
+def _parse_wheel_name(rest: str) -> tuple[str, str, str]:
    name, rest = rest.split("-", 1)
    version, x, y, z = rest.rsplit("-", 3)
    return name, version, f"{x}-{y}-{z}"


class PackageEntry(NamedTuple):
    version: str
-    dependencies: List[str]
+    dependencies: list[str]


DistListMapping = Dict[str, List[Tuple[int, str]]]


@dataclasses.dataclass()
class Finder:
-    index_urls: List[str]
+    index_urls: list[str]
    matcher: WheelMatcher
    session: requests.Session

-    def collect_best_metadta_urls(self, name: str) -> Dict[str, str]:
+    def collect_best_metadta_urls(self, name: str) -> dict[str, str]:
        all_dists: DistListMapping = collections.defaultdict(list)
        for index_url in self.index_urls:
            res = requests.get(f"{index_url}/{name}")
@@ -259,12 +256,12 @@ def iter_package_entries(self, name: str) -> Iterator[PackageEntry]:
            http_file = HttpFile(url, self.session)
            parser = email.parser.BytesParser()
            data = parser.parsebytes(http_file.read(), headersonly=True)
-            dependencies: List[str] = data.get_all("Requires-Dist", [])
+            dependencies: list[str] = data.get_all("Requires-Dist", [])
            yield PackageEntry(version, dependencies)

    def process_package_entry(
        self, name: str, entry: PackageEntry
-    ) -> Optional[Set[str]]:
+    ) -> set[str] | None:
        more = set()
        for dep in entry.dependencies:
            try:
@@ -283,10 +280,10 @@ def process_package_entry(
    def find(self, package_names: Iterable[str]) -> dict:
        data = {}
        while package_names:
-            more: Set[str] = set()
+            more: set[str] = set()
            logger.info("Discovering %s", ", ".join(package_names))
            for name in package_names:
-                entries: Dict[str, dict] = {}
+                entries: dict[str, dict] = {}
                for e in self.iter_package_entries(name):
                    result = self.process_package_entry(name, e)
                    if result is None:
@@ -298,10 +295,10 @@ def find(self, package_names: Iterable[str]) -> dict:
        return data


-def main(args: Optional[List[str]]) -> int:
+def main(args: list[str] | None) -> int:
    options = parse_args(args)
    if not options.output:
-        output_path: Optional[pathlib.Path] = None
+        output_path: pathlib.Path | None = None
    else:
        output_path = get_output_path(options.output, options.overwrite)
    matcher = WheelMatcher.compatible_with(
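Note: the remaining edits in this file replace typing.Optional/List/Dict/Set/FrozenSet/Tuple annotations with the PEP 604/585 spellings (X | None, list[str], dict[...], and so on); only the annotations change, not runtime behaviour. A minimal sketch of the equivalence (using from __future__ import annotations, which makes the new spellings safe on interpreters older than 3.10; this is a general note, not a claim about this file's imports):

from __future__ import annotations

from typing import List, Optional


def first_old(items: Optional[List[str]]) -> Optional[str]:
    # Old spelling of the annotations.
    return items[0] if items else None


def first_new(items: list[str] | None) -> str | None:
    # New spelling; identical at runtime.
    return items[0] if items else None


print(first_old(["a"]), first_new(None))  # prints: a None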
4 changes: 2 additions & 2 deletions tests/functional/python/test_resolvers_python.py
@@ -72,7 +72,7 @@ def identify(self, requirement_or_candidate):
        name = packaging.utils.canonicalize_name(requirement_or_candidate.name)
        if requirement_or_candidate.extras:
            extras_str = ",".join(sorted(requirement_or_candidate.extras))
-            return "{}[{}]".format(name, extras_str)
+            return f"{name}[{extras_str}]"
        return name

    def get_preference(
@@ -112,7 +112,7 @@ def is_satisfied_by(self, requirement, candidate):
    def _iter_dependencies(self, candidate):
        name = packaging.utils.canonicalize_name(candidate.name)
        if candidate.extras:
-            r = "{}=={}".format(name, candidate.version)
+            r = f"{name}=={candidate.version}"
            yield packaging.requirements.Requirement(r)
        for r in self.index[name][str(candidate.version)]["dependencies"]:
            requirement = packaging.requirements.Requirement(r)
