diff --git a/.github/workflows/cd-release.yaml b/.github/workflows/cd-release.yaml new file mode 100644 index 0000000000..65e20792d5 --- /dev/null +++ b/.github/workflows/cd-release.yaml @@ -0,0 +1,31 @@ +name: "[CD] Create release" +on: + push: + tags: + - "v1*" + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Build + run: | + pip install -e . + scripts/release/generator.py + scripts/release/changelog.py --version ${{ github.ref_name }} + - name: Release + uses: softprops/action-gh-release@v2 + if: startsWith(github.ref, 'refs/tags/v1') + with: + name: Release ${{ github.ref_name }} + token: ${{ secrets.GH_PAT }} + body_path: ${{ github.workspace }}/build/CHANGELOG.md + files: | + build/schemas-cfnlint.zip + build/schemas-draft7.zip diff --git a/.github/workflows/ci-pr-coverage.yaml b/.github/workflows/ci-pr-coverage.yaml index 89b9fe9546..d8ed0a56b9 100644 --- a/.github/workflows/ci-pr-coverage.yaml +++ b/.github/workflows/ci-pr-coverage.yaml @@ -31,7 +31,7 @@ jobs: ref: ${{ github.event.workflow_run.head_sha }} path: repo_clone - name: Upload coverage report - if: '!cancelled()' + if: "!cancelled()" uses: codecov/codecov-action@v3 with: override_commit: ${{ github.event.workflow_run.head_sha }} diff --git a/.github/workflows/ci-pr.yaml b/.github/workflows/ci-pr.yaml index 8fab70662c..033b689187 100644 --- a/.github/workflows/ci-pr.yaml +++ b/.github/workflows/ci-pr.yaml @@ -8,7 +8,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python: [ "3.8", "3.9", "3.10", "3.11", "3.12" ] + python: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/maintenance-v1.yaml b/.github/workflows/maintenance-v1.yaml index b08f92b63f..4355411a7c 100644 --- a/.github/workflows/maintenance-v1.yaml +++ b/.github/workflows/maintenance-v1.yaml @@ -1,7 +1,7 @@ 
name: Automated Maintenance v1 on: schedule: - - cron: '0 0,6,12,18 * * *' + - cron: "0 0,6,12,18 * * *" workflow_dispatch: # Enables on-demand/manual triggering: https://docs.github.com/en/free-pro-team@latest/actions/managing-workflow-runs/manually-running-a-workflow jobs: job: @@ -9,12 +9,11 @@ jobs: steps: - uses: actions/checkout@v4 with: - ref: 'main' + ref: "main" - uses: actions/setup-python@v5 with: python-version: 3 - - - id: maintenance + - id: maintenance run: | latest_sam_cli=`curl -s https://api.github.com/repos/aws/aws-sam-cli/releases/latest | jq -r .tag_name | cut -c 2-` latest=`curl "https://pypi.org/pypi/aws-sam-cli/$latest_sam_cli/json" -s | jq -r '.info.requires_dist[] | select(contains("aws-sam-translator"))' | cut -c 21-` @@ -44,4 +43,4 @@ jobs: Automated changes by [create-pull-request](https://github.com/peter-evans/create-pull-request) GitHub action delete-branch: true - title: chore(schemas) - Update CloudFormation schemas to ${{ steps.maintenance.outputs.date }} + title: Update CloudFormation schemas to ${{ steps.maintenance.outputs.date }} diff --git a/scripts/release/_translator.py b/scripts/release/_translator.py new file mode 100644 index 0000000000..18501c994c --- /dev/null +++ b/scripts/release/_translator.py @@ -0,0 +1,95 @@ +""" +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+SPDX-License-Identifier: MIT-0
+"""
+
+from __future__ import annotations
+
+# Translate cfn-lint unique keywords into json schema keywords
+import logging
+from collections import deque
+from typing import Any, Iterator
+
+from cfnlint.schema import PROVIDER_SCHEMA_MANAGER
+
+logger = logging.getLogger(__name__)
+
+
+def required_xor(properties: list[str]) -> dict[str, list[Any]]:
+
+    return {"oneOf": [{"required": [p]} for p in properties]}
+
+
+def dependent_excluded(properties: dict[str, list[str]]) -> dict[str, list[Any]]:
+    dependencies: dict[str, Any] = {"dependencies": {}}
+    for prop, exclusions in properties.items():
+        dependencies["dependencies"][prop] = {"not": {"anyOf": []}}
+        for exclusion in exclusions:
+            dependencies["dependencies"][prop]["not"]["anyOf"].append(
+                {"required": [exclusion]}  # "required" takes a list of names in draft-07, not a bare string
+            )
+
+    return dependencies
+
+
+_keywords = {
+    "requiredXor": required_xor,
+    "dependentExcluded": dependent_excluded,
+}
+
+
+def _find_keywords(schema: Any) -> Iterator[deque[str | int]]:
+
+    if isinstance(schema, list):
+        for i, item in enumerate(schema):
+            for path in _find_keywords(item):
+                path.appendleft(i)
+                yield path
+    elif isinstance(schema, dict):
+        for key, value in schema.items():
+            if key in _keywords:
+                yield deque([key, value])
+            else:
+                for path in _find_keywords(value):
+                    path.appendleft(key)
+                    yield path
+
+
+def translator(resource_type: str, region: str):
+    keywords = list(
+        _find_keywords(
+            PROVIDER_SCHEMA_MANAGER.get_resource_schema(
+                region=region, resource_type=resource_type
+            ).schema
+        )
+    )
+
+    for keyword in keywords:
+        value = keyword.pop()
+        key = keyword.pop()
+        if not keyword:
+            path = ""
+        else:
+            path = f"/{'/'.join(str(k) for k in keyword)}"
+
+        patch = [
+            {
+                "op": "add",
+                "path": f"{path}/allOf",
+                "value": [],
+            }
+        ]
+
+        logger.info(f"Patch {resource_type} add allOf for {key}")
+        PROVIDER_SCHEMA_MANAGER._schemas[region][resource_type].patch(patches=patch)
+
+        patch = [
+            {
+                "op": "remove",
+                "path": f"{path}/{key}",
+            },
+            {"op": "add", "path": f"{path}/allOf/-", "value": _keywords[key](value)},  # type: ignore
+        ]
+
+        logger.info(f"Patch {resource_type} replace for {key}")
+        PROVIDER_SCHEMA_MANAGER._schemas[region][resource_type].patch(patches=patch)
diff --git a/scripts/release/changelog.py b/scripts/release/changelog.py
new file mode 100755
index 0000000000..5334212ab7
--- /dev/null
+++ b/scripts/release/changelog.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+"""
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+SPDX-License-Identifier: MIT-0
+"""
+
+import argparse
+from pathlib import Path
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--version")
+args = parser.parse_args()
+
+with open("CHANGELOG.md", "r") as f:
+    text = f.read()
+
+output = []
+# must be initialized: without this, the first non-heading line raises NameError
+found = False
+
+for line in text.splitlines():
+
+    if line.startswith("### "):
+        if args.version == line[3:].strip():
+            found = True
+        elif found:
+            break
+    else:
+        if found:
+            output.append(line)
+
+build_dir = Path("build")
+# create output dir so the script also works when run before generator.py
+build_dir.mkdir(parents=True, exist_ok=True)
+with open(build_dir / "CHANGELOG.md", "w") as f:
+    f.write("\n".join(output))
diff --git a/scripts/release/generator.py b/scripts/release/generator.py
new file mode 100755
index 0000000000..987441a6ae
--- /dev/null
+++ b/scripts/release/generator.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+"""
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+SPDX-License-Identifier: MIT-0
+"""
+import logging
+import tarfile
+from collections import deque
+from pathlib import Path
+
+import _translator
+
+from cfnlint.helpers import REGIONS, ToPy, format_json_string, load_plugins
+from cfnlint.schema import PROVIDER_SCHEMA_MANAGER
+
+logging.basicConfig(
+    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
+)
+logger = logging.getLogger(__name__)
+
+
+def _get_schema_path(schema, path):
+    s = schema.schema
+    schema_path = deque([])
+    while path:
+        key = path.popleft()
+        if key == "*":
+            schema_path.append("items")
+            s = s["items"]
+        else:
+            s = s["properties"][key]
+            schema_path.extend(["properties", key])
+
+        pointer = s.get("$ref")
+        if pointer:
+            _, s = schema.resolver.resolve(pointer)
+            schema_path = deque(pointer.split("/")[1:])
+
+    return schema_path
+
+
+def _build_patch(path, patch):
+    if not path:
+        path_str = "/allOf"
+    else:
+        path_str = f"/{'/'.join(path)}/allOf"
+
+    return (
+        [
+            {
+                "op": "add",
+                "path": path_str,
+                "value": [],
+            }
+        ],
+        [
+            {
+                "op": "add",
+                "path": f"{path_str}/-",
+                "value": patch,
+            }
+        ],
+    )
+
+
+schemas = {}
+
+##########################
+#
+# Build the definitive list of all resource types across all regions
+#
+###########################
+
+for region in ["us-east-1"] + list((set(REGIONS) - set(["us-east-1"]))):
+    for resource_type in PROVIDER_SCHEMA_MANAGER.get_resource_types(region):
+        if resource_type in ["AWS::CDK::Metadata", "Module"]:
+            continue
+        if resource_type not in schemas:
+            schemas[resource_type] = region
+
+
+##########################
+#
+# Merge in rule schemas into the resource schemas
+#
+###########################
+
+rules_folder = Path("src") / "cfnlint" / "rules"
+
+rules = load_plugins(
+    rules_folder,
+    name="CfnLintJsonSchema",
+    modules=(
+        "cfnlint.rules.jsonschema.CfnLintJsonSchema",
+        "cfnlint.rules.jsonschema.CfnLintJsonSchema.CfnLintJsonSchema",
+    ),
+)
+
+for rule in rules:
+    # Skip regional rules. A class object never equals a string, so the
+    # original `rule.__class__.__base__ == "cfnlint...."` was always False;
+    # compare the base class's fully-qualified name instead.
+    if f"{rule.__class__.__base__.__module__}.{rule.__class__.__base__.__name__}" == (
+        "cfnlint.rules.jsonschema."
+        "CfnLintJsonSchemaRegional.CfnLintJsonSchemaRegional"
+    ):
+        continue
+    if not rule.id or rule.schema == {}:
+        continue
+
+    for keyword in rule.keywords:
+        if not keyword.startswith("Resources/"):
+            continue
+        path = deque(keyword.split("/"))
+
+        if len(path) < 3:
+            continue
+
+        path.popleft()
+        resource_type = path.popleft()
+        resource_properties = path.popleft()
+        if resource_type not in schemas or resource_properties != "Properties":  # "or": skip unknown types, avoiding KeyError below
+            continue
+
+        schema_path = _get_schema_path(
+            PROVIDER_SCHEMA_MANAGER.get_resource_schema(
+                schemas[resource_type], resource_type
+            ),
+            path,
+        )
+        all_of_patch, schema_patch = _build_patch(schema_path, rule.schema)
+
+        PROVIDER_SCHEMA_MANAGER._schemas[schemas[resource_type]][resource_type].patch(
+            patches=all_of_patch
+        )
+        PROVIDER_SCHEMA_MANAGER._schemas[schemas[resource_type]][resource_type].patch(
+            patches=schema_patch
+        )
+
+        logger.info(f"Patch {rule.id} for {resource_type} in {schemas[resource_type]}")
+
+
+build_dir = Path("build")
+schemas_dir = build_dir / "schemas"
+schemas_cfnlint_dir = schemas_dir / "cfnlint"
+schemas_cfnlint_dir.mkdir(parents=True, exist_ok=True)
+
+schemas_draft7_dir = schemas_dir / "draft7"
+schemas_draft7_dir.mkdir(parents=True, exist_ok=True)
+
+for resource_type, region in schemas.items():
+    rt_py = ToPy(resource_type)
+
+    with open(schemas_cfnlint_dir / f"{rt_py.py}.json", "w") as f:
+        f.write(
+            format_json_string(
+                PROVIDER_SCHEMA_MANAGER.get_resource_schema(
+                    region, resource_type
+                ).schema
+            )
+        )
+
+    _translator.translator(resource_type, region)
+
+    with open(schemas_draft7_dir / f"{rt_py.py}.json", "w") as f:
+        f.write(
+            format_json_string(
+                PROVIDER_SCHEMA_MANAGER.get_resource_schema(
+                    region, resource_type
+                ).schema
+            )
+        )
+
+logger.info("Create schema package")
+with tarfile.open(build_dir / "schemas-cfnlint.zip", "w:gz") as tar:
+    tar.add(schemas_cfnlint_dir, arcname="schemas")
+
+with tarfile.open(build_dir / "schemas-draft7.zip", "w:gz")
as tar: + tar.add(schemas_draft7_dir, arcname="schemas") diff --git a/src/cfnlint/data/schemas/extensions/aws_elasticloadbalancingv2_loadbalancer/application_subnets.json b/src/cfnlint/data/schemas/extensions/aws_elasticloadbalancingv2_loadbalancer/application_subnets.json index 615983bd23..2011ec9cad 100644 --- a/src/cfnlint/data/schemas/extensions/aws_elasticloadbalancingv2_loadbalancer/application_subnets.json +++ b/src/cfnlint/data/schemas/extensions/aws_elasticloadbalancingv2_loadbalancer/application_subnets.json @@ -26,10 +26,6 @@ "Subnets": { "minItems": 2 } - }, - "requiredXor": [ - "Subnets", - "SubnetMappings" - ] + } } }