diff --git a/src/antsibull/constants.py b/src/antsibull/constants.py
index 8afd2b00..e0645f31 100644
--- a/src/antsibull/constants.py
+++ b/src/antsibull/constants.py
@@ -32,3 +32,20 @@
     "validate-modules",
     "yamllint",
 )
+SANITY_TESTS_BANNED_IGNORES = frozenset(
+    {
+        "validate-modules!skip",
+        "validate-modules:doc-choices-do-not-match-spec",
+        "validate-modules:doc-default-does-not-match-spec",
+        "validate-modules:doc-missing-type",
+        "validate-modules:doc-required-mismatch",
+        "validate-modules:mutually_exclusive-unknown",
+        "validate-modules:no-log-needed",
+        # Don't enforce this for now. Modules may have private parameters that
+        # are only used by a corresponding action plugin.
+        # "validate-modules:nonexistent-parameter-documented",
+        "validate-modules:parameter-list-no-elements",
+        "validate-modules:parameter-type-not-in-doc",
+        "validate-modules:undocumented-parameter",
+    }
+)
diff --git a/src/antsibull/from_source/verify.py b/src/antsibull/from_source/verify.py
index e76b8ca1..605b61bb 100644
--- a/src/antsibull/from_source/verify.py
+++ b/src/antsibull/from_source/verify.py
@@ -17,7 +17,7 @@
 import aiofiles.ospath
 from antsibull_core.utils.hashing import verify_hash
 
-from antsibull.types import add_string_yaml_type
+from antsibull.types import add_yaml_type
 
 if TYPE_CHECKING:
     from typing_extensions import NotRequired
@@ -44,7 +44,7 @@ def __str__(self) -> str:
 
 LENIENT_FILE_ERROR_IGNORES = frozenset({FileError.MISSING_FILE})
 
-add_string_yaml_type(FileError)
+add_yaml_type(FileError)
 
 
 class FileErrorOutput(TypedDict):
diff --git a/src/antsibull/sanity_tests.py b/src/antsibull/sanity_tests.py
index bf9ffba0..841d1896 100644
--- a/src/antsibull/sanity_tests.py
+++ b/src/antsibull/sanity_tests.py
@@ -15,49 +15,143 @@
 import shlex
 import shutil
 import sys
-from collections.abc import Sequence
+from collections.abc import Collection, Iterable, Iterator, Sequence
+from dataclasses import dataclass
 from functools import partial
 from pathlib import Path
 from subprocess import CompletedProcess
 from typing import TYPE_CHECKING, Any, TypedDict
 
 from antsibull_core import app_context
+from antsibull_core.logging import log
 from antsibull_core.subprocess_util import log_run
 from antsibull_core.yaml import store_yaml_file
 from packaging.version import Version
 
-from antsibull.constants import SANITY_TESTS_DEFAULT
-from antsibull.types import CollectionName
+from antsibull.constants import SANITY_TESTS_BANNED_IGNORES, SANITY_TESTS_DEFAULT
+from antsibull.types import CollectionName, add_dataclass_yaml_type
 
 if TYPE_CHECKING:
     from _typeshed import StrPath
 
+mlog = log.fields(mod=__name__)
+
 
 class SanityOutput(TypedDict):
+    """
+    Mapping of `ansible-test sanity` output and other related data
+    """
+
     cmd: list[str]
     returncode: int
     stdout: str
     stderr: str
     runtime: float
     test_json: dict[str, Any]
+    ignore_entries: list[IgnoreEntry]
+    banned_ignore_entries: list[IgnoreEntry]
+    ignores_file: str | None
 
 
 class CollectionOutput(TypedDict):
+    """
+    Collection entry
+    """
+
     failed: bool
     sanity: SanityOutput
 
 
 class EnvDetails(TypedDict):
+    """
+    Details about the ansible-test environment
+    """
+
     ansible_test_version: str
     sanity_tests: list[str]
 
 
 class Output(TypedDict):
+    """
+    Parent mapping of the sanity test data file
+    """
+
     collections: dict[CollectionName, CollectionOutput]
     total_runtime: float
     env_details: EnvDetails
 
 
+@dataclass(frozen=True)
+class IgnoreEntry:
+    """
+    Represents an entry in an `ansible-test sanity` ignores file
+    """
+
+    file: str
+    test: str
+    remainder: str
+
+    def as_str(self) -> str:
+        return f"{self.file} {self.test}{self.remainder}"
+
+
+add_dataclass_yaml_type(IgnoreEntry)
+
+
+def parse_ignores_file(file: StrPath) -> Iterator[IgnoreEntry]:
+    """
+    Parse a sanity test ignore file
+    """
+    flog = mlog.fields(func="parse_ignores_file", file=file)
+    with open(file, encoding="utf-8") as fp:
+        for line in fp:
+            line = line.rstrip("\n")
+            parsed = line.split(" ", 2)
+            if len(parsed) == 2:
+                parsed.append("")
+            if len(parsed) != 3:
+                flog.error("Failed to parse line: {0}", line)
+                continue
+            yield IgnoreEntry(file=parsed[0], test=parsed[1], remainder=parsed[2])
+
+
+def get_ignores_file(directory: Path, version: str) -> Path | None:
+    """
+    Determine the path to a sanity test ignore file for a certain ansible-test `version`
+
+    Args:
+        directory:
+            Collection's directory
+        version:
+            Version of ansible-test
+
+    Returns:
+        Path to a sanity test ignore file if one exists or `None`
+    """
+    expected = directory / "tests/sanity" / f"ignore-{version}.txt"
+    return expected if expected.is_file() else None
+
+
+def filter_invalid_ignores(
+    entries: Iterable[IgnoreEntry],
+    matches: Collection[str] = SANITY_TESTS_BANNED_IGNORES,
+) -> Iterator[IgnoreEntry]:
+    """
+    Given an Iterable of `IgnoreEntry`s, yield the entries whose test matches `matches`
+
+    Args:
+        entries:
+            Iterable of `IgnoreEntry`s
+        matches:
+            `Collection` of banned sanity test ignore names
+    Yields:
+        `IgnoreEntry`s whose test is in `matches`
+    """
+    for entry in entries:
+        if entry.test in matches:
+            yield entry
+
+
 def is_git_repo(base_directory: Path, directory: Path) -> bool:
     """
     Check if a collection directory is its own git directory
@@ -160,11 +254,27 @@ def run_sanity_tests(
     return cmd, runtime
 
 
+def _get_ignores_info(
+    directory: Path, ansible_test_version: str
+) -> tuple[list[IgnoreEntry], list[IgnoreEntry], str | None]:
+    v = Version(ansible_test_version)
+    ignore_file = get_ignores_file(directory, f"{v.major}.{v.minor}")
+    ignores: list[IgnoreEntry] = []
+    banned_ignores: list[IgnoreEntry] = []
+    file_base: str | None = None
+    if ignore_file:
+        ignores = list(parse_ignores_file(ignore_file))
+        banned_ignores = list(filter_invalid_ignores(ignores))
+        file_base = str(ignore_file.relative_to(directory))
+    return ignores, banned_ignores, file_base
+
+
 def handle_collection(
     directory: Path,
     tests: Sequence[str],
     clean: bool,
     quiet: bool,
+    env_details: EnvDetails,
     ansible_test_bin: Sequence[StrPath] = ("ansible-test",),
 ) -> tuple[CollectionName, CollectionOutput]:
     """
@@ -179,6 +289,9 @@ def handle_collection(
         collection, directory, tests, quiet, ansible_test_bin
     )
     errors = get_errors(directory)
+    ignores, banned_ignores, ignores_file = _get_ignores_info(
+        directory, env_details["ansible_test_version"]
+    )
     sanity_output = SanityOutput(
         cmd=cmd.args,
         returncode=cmd.returncode,
@@ -186,11 +299,17 @@ def handle_collection(
         stderr=cmd.stderr,
         runtime=runtime,
         test_json=errors,
+        ignore_entries=ignores,
+        banned_ignore_entries=banned_ignores,
+        ignores_file=ignores_file,
     )
     return (
         collection,
         # Namespace the data under "sanity" for futureproofing
-        {"sanity": sanity_output, "failed": bool(cmd.returncode)},
+        {
+            "sanity": sanity_output,
+            "failed": bool(cmd.returncode) or bool(banned_ignores),
+        },
     )
 
 
@@ -223,7 +342,9 @@ def sanity_tests_command() -> int:
     }
 
     collections_errors: dict[CollectionName, CollectionOutput] = dict(
-        handle_collection(collection, tests, clean, quiet, ansible_test_bin)
+        handle_collection(
+            collection, tests, clean, quiet, env_details, ansible_test_bin
+        )
         for collection in collections
     )
     total_runtime = sum(
diff --git a/src/antsibull/types.py b/src/antsibull/types.py
index 21a26066..bc3dc787 100644
--- a/src/antsibull/types.py
+++ b/src/antsibull/types.py
@@ -9,7 +9,9 @@
 
 from __future__ import annotations
 
+import dataclasses
 from collections.abc import Callable
+from functools import partial
 from typing import TYPE_CHECKING, Any, TypeVar
 
 import yaml
@@ -76,7 +78,11 @@ def __hash__(self) -> int:
         return hash(type(self)) + super().__hash__()
 
 
-def add_string_yaml_type(typ: type[_T], converter: Callable[[_T], str] = str) -> None:
+def add_yaml_type(
+    typ: type[_T],
+    converter: Callable[[_T], Any] = str,
+    representer_parent: Callable = yaml.representer.SafeRepresenter.represent_str,
+) -> None:
     """
     Add a type to the YAML serializer. Defaults to serializing as a string.
     """
@@ -89,13 +95,22 @@ def add_string_yaml_type(typ: type[_T], converter: Callable[[_T], str] = str) -> None:
         dumpers.append(cdumper)
 
     def representer(rep: Any, obj: Any) -> Any:
-        return yaml.representer.SafeRepresenter.represent_str(rep, converter(obj))
+        return representer_parent(rep, converter(obj))
 
     for dumper in dumpers:
         dumper.add_representer(typ, representer)
 
 
-add_string_yaml_type(CollectionName)
+add_dataclass_yaml_type = partial(
+    add_yaml_type,
+    converter=dataclasses.asdict,
+    representer_parent=lambda dumper, data: yaml.representer.SafeRepresenter.represent_mapping(
+        dumper, "tag:yaml.org,2002:map", data
+    ),
+)
+
+
+add_yaml_type(CollectionName)
 
 
 def make_collection_mapping(mapping: dict[str, _T]) -> dict[CollectionName, _T]:
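
Below is a minimal usage sketch (not part of the patch) of how the new ignore-file helpers added to src/antsibull/sanity_tests.py fit together; the collection path and ansible-test version string are hypothetical, while the function and constant names come from the diff above.

from pathlib import Path

from antsibull.sanity_tests import (
    filter_invalid_ignores,
    get_ignores_file,
    parse_ignores_file,
)

# Hypothetical checkout of a collection under test
collection_dir = Path("ansible_collections/community/general")

# Resolves to tests/sanity/ignore-2.16.txt if that file exists, else None
ignores_file = get_ignores_file(collection_dir, "2.16")
if ignores_file is not None:
    entries = list(parse_ignores_file(ignores_file))
    # With no explicit `matches`, entries are checked against
    # SANITY_TESTS_BANNED_IGNORES; any hit marks the collection as failed
    # in handle_collection().
    for entry in filter_invalid_ignores(entries):
        print(f"banned ignore: {entry.as_str()}")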