Move compliance checks to SDK #128

Open
wants to merge 25 commits into main from lv/move_compliance_check_to_sdk
Commits (25)
3081d7d
Move compliance checks to SDK
lvoloshyn-sekoia Aug 5, 2024
fd90341
Fix default path
lvoloshyn-sekoia Aug 5, 2024
f909125
Fixes and improvements
lvoloshyn-sekoia Aug 5, 2024
83dd0cf
Fixes and improvements
lvoloshyn-sekoia Aug 5, 2024
1da1f31
Fix linting
lvoloshyn-sekoia Aug 5, 2024
6f4e736
Fixes for mypy
lvoloshyn-sekoia Aug 5, 2024
31f6019
Fixes for mypy
lvoloshyn-sekoia Aug 5, 2024
f8e2e4e
Fix tests
lvoloshyn-sekoia Aug 5, 2024
a94ea93
Enhance test fixtures
lvoloshyn-sekoia Aug 5, 2024
f36a820
Merge branch 'main' into lv/move_compliance_check_to_sdk
lvoloshyn-sekoia Aug 5, 2024
3a7e674
Improve linting
lvoloshyn-sekoia Aug 5, 2024
dcff650
Merge remote-tracking branch 'origin/lv/move_compliance_check_to_sdk'…
lvoloshyn-sekoia Aug 5, 2024
3686420
Fix test
lvoloshyn-sekoia Aug 5, 2024
794317d
Fix test
lvoloshyn-sekoia Aug 5, 2024
bb3dd89
Omit cov for action tester
lvoloshyn-sekoia Aug 6, 2024
428167f
Move compliance deps in a separate group
lvoloshyn-sekoia Sep 10, 2024
0052d25
Merge branch 'main' into lv/move_compliance_check_to_sdk
lvoloshyn-sekoia Sep 10, 2024
2ea3823
Update deps
lvoloshyn-sekoia Sep 10, 2024
9675651
Exclude compliance checks from coverage
lvoloshyn-sekoia Sep 10, 2024
2e84881
Merge branch 'main' into lv/move_compliance_check_to_sdk
lvoloshyn-sekoia Sep 10, 2024
7f06dbc
Update poetry.lock
lvoloshyn-sekoia Sep 10, 2024
8e1fef2
Merge branch 'main' into lv/move_compliance_check_to_sdk
lvoloshyn-sekoia Sep 20, 2024
46c87b9
Update poetry.lock
lvoloshyn-sekoia Sep 20, 2024
4b9a75d
Merge branch 'main' into lv/move_compliance_check_to_sdk
lvoloshyn-sekoia Oct 7, 2024
d5ef2e8
Update deps
lvoloshyn-sekoia Oct 7, 2024
1 change: 1 addition & 0 deletions .coveragerc
@@ -2,3 +2,4 @@
relative_files = True
omit =
sekoia_automation/scripts/new_module/template/*
sekoia_automation/scripts/action_runner.py
1,950 changes: 1,082 additions & 868 deletions poetry.lock

Large diffs are not rendered by default.

8 changes: 6 additions & 2 deletions pyproject.toml
@@ -69,6 +69,10 @@
types-pyyaml = "^6.0.12.10"
types-python-slugify = "^8.0.0.2"
pre-commit = "^3.3.3"
jsonschema = "^4.22.0"
semver = "^3.0.2"
pillow = "^10.3.0"
numpy = "^1.26.4"
pyastgrep = "^1.3.2"

[tool.poetry.extras]
all = [
@@ -121,14 +125,14 @@
testpaths = [
]

[tool.ruff]
select = ["A", "ARG", "E", "F", "I", "N", "RUF", "UP", "W"]
lint.select = ["A", "ARG", "E", "F", "I", "N", "RUF", "UP", "W"]
exclude = [
"tests/expectations/sample_module/main.py",
"tests/aio/",
"sekoia_automation/scripts/new_module/template/"
]

[tool.ruff.per-file-ignores]
[tool.ruff.lint.per-file-ignores]
"tests/*" = ["ARG"] # Ignore unusued args because of pytest fixtures

[tool.mypy]
14 changes: 14 additions & 0 deletions sekoia_automation/cli.py
@@ -208,5 +208,19 @@
def run_action(
print(module_runner.run(args=kwargs))


@app.command(name="check-compliance")
def check_compliance(
path: Path = typer.Option(
".", help="Path to the playbook module or folder with playbook modules"
),
fix: bool = typer.Option(False, "--fix"),
):
from .scripts.check_compliance import CheckCompliance

cc = CheckCompliance(path=path.resolve())

cc.run(fix=fix)


if __name__ == "__main__":
app()
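
For context, a minimal sketch of how the new command could be exercised through typer's test runner once this branch is installed; the automation-library path below is hypothetical:

from typer.testing import CliRunner

from sekoia_automation.cli import app

runner = CliRunner()

# Report-only pass over a hypothetical automation-library checkout
result = runner.invoke(app, ["check-compliance", "--path", "/path/to/automation-library"])
print(result.output)
assert result.exit_code in (0, 1)  # 1 means compliance errors were found

# Apply the automatic fixes listed by the report-only pass
result = runner.invoke(app, ["check-compliance", "--path", "/path/to/automation-library", "--fix"])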
273 changes: 273 additions & 0 deletions sekoia_automation/scripts/check_compliance.py
@@ -0,0 +1,273 @@
import json
from collections import defaultdict
from functools import partial
from pathlib import Path

from sekoia_automation.scripts.compliance.validators import ModuleValidator
from sekoia_automation.scripts.compliance.validators.models import CheckError


class CheckCompliance:
def __init__(self, path: Path) -> None:
if path.name == "automation-library":
# launched from a folder with all modules
self.modules_path = path
print(f"Looking for modules in {path}...")
self.all_modules = self.find_modules(self.modules_path)
self.modules = self.all_modules

elif path.parent.name == "automation-library":
# launched from individual module
print(f"Checking module in {path}")
self.modules_path = path.parent
self.all_modules = self.find_modules(self.modules_path)
self.modules = [path]

else:
raise ValueError(
"Please run script for a module folder or `automation-library` folder"
)

def run(self, fix: bool = False) -> None:
all_validators = []
selected_validators = []

errors_to_fix = []
has_any_errors = False

print(f"🔎 {len(self.modules)} module(s) found")

for module in self.all_modules:
r = self.check_module(module)
all_validators.append(r)

# We have to check all the modules, but
# show results only for the selected ones
if module in self.modules:
selected_validators.append(r)

self.check_uuids_and_slugs(all_validators)
self.check_docker_params(all_validators)

for r in selected_validators:
if r.result.errors:
for item in r.result.errors:
has_any_errors = True
if item.fix is not None:
errors_to_fix.append(item)

for res in sorted(selected_validators, key=lambda x: x.path):
if len(res.result.errors) > 0:
fmt = self.format_errors(res, ignored_paths=set())
if fmt:
print(fmt)

if not fix:
if len(errors_to_fix) > 0:
print()
print("🛠 Available automatic fixes (run with `fix` command):")
for error in errors_to_fix:
print(
f"FIX "
f"{error.filepath.relative_to(self.modules_path)}"
f":"
f"{error.fix_label}"
)

else:
if len(errors_to_fix) == 0:
print("There is nothing we can fix automatically")
print()
print("Fixing...")
for error in errors_to_fix:
print(
f"FIX "
f"{error.filepath.relative_to(self.modules_path)}"
f":"
f"{error.fix_label}"
)
error.fix()

if has_any_errors:
print("❌ Found errors")
exit(1)

else:
print("✅ No errors found!")

def check_module(self, module_path: str | Path):
if isinstance(module_path, str):
module_path = Path(module_path)

m = ModuleValidator(path=module_path)
m.validate()

return m

def format_errors(self, mod_val: ModuleValidator, ignored_paths: set[Path]) -> str:
errors = mod_val.result.errors
module_name = mod_val.path.name
return "\n".join(
f"{module_name}:{error.filepath.name}:{error.error}"
for error in errors
if error.filepath not in ignored_paths
)

def find_modules(self, root_path: Path) -> list[Path]:
result = []

for path in root_path.iterdir():
if (
path.is_dir()
and not path.name.startswith("_")
and not path.name.startswith(".")
and path.name not in ("docs",)
):
result.append(path)

return result

def fix_set_uuid(self, file_path: Path, uuid: str) -> None:
with open(file_path) as file:
manifest = json.load(file)

manifest["uuid"] = uuid

with open(file_path, "w") as file:
json.dump(manifest, file, indent=2)

def check_uniqueness(self, items, error_msg: str):
for k, v in items.items():
if len(v) > 1:
for file_name, val in v:
path = val.result.options["path"] / file_name

# We don't add fix call (e.g. generating new UUID)
# here, because it would create a lot of
# error-prone corner cases
val.result.errors.append(
CheckError(
filepath=path,
error=error_msg,
)
)

def check_docker_params(self, validators: list[ModuleValidator]):
for validator in validators:
actions_docker_params = defaultdict(list)
triggers_docker_params = defaultdict(list)
connectors_docker_params = defaultdict(list)

module_path = validator.result.options["path"]
docker_parameters = validator.result.options.get("docker_parameters", {})

suffix_to_docker: defaultdict[str, dict[str, str]] = defaultdict(dict)
for filename, docker in docker_parameters.items():
if filename.startswith("action_"):
actions_docker_params[docker].append((filename, validator))
suffix_to_docker[filename.lstrip("action_")]["action"] = docker

elif filename.startswith("trigger_"):
triggers_docker_params[docker].append((filename, validator))
suffix_to_docker[filename.lstrip("trigger_")]["trigger"] = docker

elif filename.startswith("connector_"):
connectors_docker_params[docker].append((filename, validator))
suffix_to_docker[filename.lstrip("connector_")][
"connector"
] = docker

for suffix, data in suffix_to_docker.items():
# ignore cases where we have only
# either `trigger_` or `connector_` files
if "connector" not in data or "trigger" not in data:
continue

if data["connector"] != data["trigger"]:
filename_to_fix = f"connector_{suffix}"
filepath = module_path / filename_to_fix
validator.result.errors.append(
CheckError(
filepath=filepath,
error=f"`docker_parameters` is not "
f"consistent with trigger_{suffix}",
)
)
# We don't want to check these further
del triggers_docker_params[data["trigger"]]
del connectors_docker_params[data["connector"]]

self.check_uniqueness(
actions_docker_params, error_msg="`docker_parameters` is not unique"
)
self.check_uniqueness(
triggers_docker_params, error_msg="`docker_parameters` is not unique"
)
self.check_uniqueness(
connectors_docker_params, error_msg="`docker_parameters` is not unique"
)

def check_uuids_and_slugs(self, validators: list[ModuleValidator]):
manifest_uuids = defaultdict(list)
manifest_slugs = defaultdict(list)
actions_uuids = defaultdict(list)
triggers_uuids = defaultdict(list)
connectors_uuids = defaultdict(list)

for validator in validators:
module_path = validator.result.options["path"]

module_slug = validator.result.options.get("module_slug")
if module_slug:
manifest_slugs[module_slug].append(("manifest.json", validator))

uuids = validator.result.options.get("uuid_to_check", {})

suffix_to_uuid: defaultdict[str, dict[str, str]] = defaultdict(dict)
for filename, uuid in uuids.items():
if filename == "manifest.json":
manifest_uuids[uuid].append((filename, validator))

elif filename.startswith("action_"):
actions_uuids[uuid].append((filename, validator))

elif filename.startswith("trigger_"):
triggers_uuids[uuid].append((filename, validator))
suffix_to_uuid[filename.lstrip("trigger_")]["trigger"] = uuid

elif filename.startswith("connector_"):
connectors_uuids[uuid].append((filename, validator))
suffix_to_uuid[filename.lstrip("connector_")]["connector"] = uuid

for suffix, data in suffix_to_uuid.items():
# ignore cases where we have only either
# `trigger_` or `connector_` files
if "connector" not in data or "trigger" not in data:
continue

if data["connector"] != data["trigger"]:
filename_to_fix = f"connector_{suffix}"
filepath = module_path / filename_to_fix
validator.result.errors.append(
CheckError(
filepath=filepath,
error=f"UUID is not consistent with trigger_{suffix}",
fix_label=f"Set the same UUID for "
f"trigger_{suffix} and connector_{suffix}",
fix=partial(
self.fix_set_uuid,
file_path=filepath,
uuid=data["trigger"],
),
)
)
# We don't want to check these further
del triggers_uuids[data["trigger"]]
del connectors_uuids[data["connector"]]

# check UUIDs from each group separately
self.check_uniqueness(manifest_slugs, error_msg="slug is not unique")
self.check_uniqueness(manifest_uuids, error_msg="UUID is not unique")
self.check_uniqueness(actions_uuids, error_msg="UUID is not unique")
self.check_uniqueness(connectors_uuids, error_msg="UUID is not unique")
self.check_uniqueness(triggers_uuids, error_msg="UUID is not unique")
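
As a usage note, a minimal sketch of driving this class directly, mirroring what the new `check-compliance` command does; the checkout path is hypothetical:

from pathlib import Path

from sekoia_automation.scripts.check_compliance import CheckCompliance

# Accepts either a single module folder or the whole automation-library checkout
checker = CheckCompliance(path=Path("/path/to/automation-library").resolve())

# Report-only pass; run() exits with status 1 when any error is found
checker.run(fix=False)

# A second pass with fix=True would apply the automatic fixes listed by the report
# checker.run(fix=True)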
Empty file.
30 changes: 30 additions & 0 deletions sekoia_automation/scripts/compliance/validators/__init__.py
@@ -0,0 +1,30 @@
# ruff: noqa
from pathlib import Path

from .actions_json import ActionsJSONValidator
from .changelog import ChangelogValidator
from .connectors_json import ConnectorsJSONValidator
from .deps import DependenciesValidator
from .dockerfile import DockerfileValidator
from .logo import LogoValidator
from .main import MainPYValidator
from .manifest import ManifestValidator
from .tests import TestsValidator
from .triggers_json import TriggersJSONValidator
from .module import ModuleValidator

MODULES_PATH = Path(__file__).parent.parent.parent.parent

__all__ = (
"ActionsJSONValidator",
"ChangelogValidator",
"ConnectorsJSONValidator",
"DependenciesValidator",
"DockerfileValidator",
"LogoValidator",
"MainPYValidator",
"ManifestValidator",
"TestsValidator",
"TriggersJSONValidator",
"ModuleValidator",
)
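
For orientation, a small sketch of how a single validator is consumed by check_compliance.py above; the module path is hypothetical:

from pathlib import Path

from sekoia_automation.scripts.compliance.validators import ModuleValidator

# Validate one playbook module and inspect the collected errors,
# the same way CheckCompliance.check_module() does
validator = ModuleValidator(path=Path("/path/to/automation-library/MyModule"))
validator.validate()

for error in validator.result.errors:
    print(f"{error.filepath.name}: {error.error}")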